Package: org.broadinstitute.gatk.utils.sam

Examples of org.broadinstitute.gatk.utils.sam.GATKSAMRecord


        /** Returns the backing reference sequence as raw bytes (one byte per base). */
        public byte[] getRefBases() { return ref.getBytes(); }
        /** Returns the length of the backing reference sequence in bases. */
        public int getRefLength() { return ref.length(); }

        public GATKSAMRecord makeRead(final int start, final int length) {
            final byte[] quals = Utils.dupBytes((byte)30, length);
            final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read " + readCounter++, 0, start + getStart(), ref.substring(start, start + length).getBytes(), quals, length + "M");
            read.setReadGroup(rg);
            return read;
        }
View Full Code Here


    }

    @Test
    public void testUnmappedReadsDoNotFail() {
        // create an unmapped read
        final GATKSAMRecord read = new GATKSAMRecord(ArtificialSAMUtils.createArtificialSamHeader());
        read.setReadName("foo");
        // "*" reference/CIGAR plus the unmapped flag mark the read as unplaced,
        // even though an alignment start is still assigned below.
        read.setReferenceName("*");
        read.setAlignmentStart(100);
        read.setCigarString("*");
        read.setReadUnmappedFlag(true);

        // try to add it to the manager
        final OverhangFixingManager manager = new OverhangFixingManager(null, null, null, 100, 1, 30, false);
        manager.addRead(read); // we just want to make sure that the following call does not fail
        Assert.assertTrue(true);
View Full Code Here

        // All reads share a single quality array of uniform Phred-30 values.
        final byte[] quals = new byte[READ_LENGTH];

        Arrays.fill(quals,(byte)30);

        // Every "good" read carries identical bases, so all kmers they
        // contribute to the corrector are error-free.
        for (int k=0; k < NUM_GOOD_READS; k++) {
            final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases.getBytes(), quals,READ_LENGTH+"M");
            finalizedReadList.add(read);
        }

        // Seed the corrector's kmer counts from the assembled read set.
        ReadErrorCorrector readErrorCorrector = new ReadErrorCorrector(kmerLengthForReadErrorCorrection,(byte)6,10, debug,refChunkHard.getBytes());
        readErrorCorrector.addReadsToKmers(finalizedReadList);
View Full Code Here

        Arrays.fill(quals,(byte)30);

        // Tile "good" reads across the reference chunk one base apart,
        // wrapping back to the start once a full window no longer fits.
        for (int k=0; k < NUM_GOOD_READS; k++) {
            final byte[] bases = Arrays.copyOfRange(refChunk.getBytes(),offset,offset+READ_LENGTH);
            final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals,READ_LENGTH+"M");
            finalizedReadList.add(read);
            offset++;
            if (offset >= refChunk.length()-READ_LENGTH)
                offset = 0;
        }
        offset = 2;
        // coverage profile is now perfectly triangular with "good" bases. Inject now bad bases with errors in them.
        for (int k=0; k < NUM_BAD_READS; k++) {
            // Copy the bases of an existing good read and corrupt one position with 'N'.
            final byte[] bases = finalizedReadList.get(k).getReadBases().clone();
            bases[offset] = 'N';
            final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, READ_LENGTH + "M");
            finalizedReadList.add(read);
            // Stride the error position by 7 so errors land at varying offsets.
            offset += 7;
            if (offset >= READ_LENGTH)
                offset = 4; // just some randomly circulating offset for error position
        }
View Full Code Here

                final int aIndex = subject.alleleIndex(allele);
                // An allele has a non-negative index exactly when likelihoods were set for it.
                Assert.assertEquals(aIndex >= 0,alleleWithLikelihoodsSet.contains(allele));
                Assert.assertTrue(aIndex < alleleCount);
                if (aIndex == -1) continue;
                for (int r = 0; r < sampleReadCount; r++) {
                    final GATKSAMRecord read = sampleToReads.get(sample).get(r);
                    // Both lookup paths — by sample index on the subject, and via the
                    // per-sample likelihoods view — must agree on the read's index.
                    final int rIndex = subject.readIndex(sIndex,read);
                    final int rIndex2 = sampleLikelihoods.readIndex(read);
                    Assert.assertEquals(rIndex,rIndex2);
                    Assert.assertEquals(rIndex >= 0,readsWithLikelihoodsSet.contains(read));
                    Assert.assertTrue(rIndex < sampleReadCount);
View Full Code Here

            final ReadAnchoring anchoring = new ReadAnchoring(r,haplotypeGraph);
            // NOTE(review): reads that ARE anchored somewhere are skipped, and the
            // anchor indices below are read from a read that reported no anchoring —
            // confirm this condition is not meant to be negated.
            if (anchoring.isAnchoredSomewhere())
                continue;
            // Clip the read to the window between its anchors, extending the right
            // edge by one kmer so the trailing kmer is fully covered.
            final int start = anchoring.leftAnchorIndex;
            final int end = anchoring.rightAnchorIndex + haplotypeGraph.getKmerSize();
            final GATKSAMRecord clipped = new ClippedGATKSAMRecord(r, start, end);
            result.put(r, clipped);
        }
        return result;
    }
View Full Code Here

                addBaseErrors(readBases, phredScaledErrorRate);

            // Uniform base qualities matching the simulated error rate.
            byte[] readQuals = new byte[readBases.length];
            Arrays.fill(readQuals, (byte)phredScaledErrorRate);

            GATKSAMRecord read = new GATKSAMRecord(header);
            read.setBaseQualities(readQuals);
            read.setReadBases(readBases);
            // readCounter is bumped as a side effect to keep read names unique.
            read.setReadName(artificialReadName+readCounter++);

            // Relative allele length decides whether the read carries a deletion
            // (shorter than ref) or an insertion (longer than ref).
            boolean isBeforeDeletion = alleleLength<refAlleleLength;
            boolean isBeforeInsertion = alleleLength>refAlleleLength;

            int eventLength = alleleLength - refAlleleLength;
            if (isReference)
                read.setCigarString(readBases.length + "M");
            else {
                // NOTE(review): the two M segments sum to readBases.length + 1 bases;
                // confirm this off-by-one is intentional in the indel CIGAR.
                if (isBeforeDeletion || isBeforeInsertion)
                    read.setCigarString((readOffset+1)+"M"+ Math.abs(eventLength) + (isBeforeDeletion?"D":"I") +
                            (readBases.length-readOffset)+"M");
                else // SNP case
                    read.setCigarString(readBases.length+"M");
            }

            read.setReadPairedFlag(false);
            read.setAlignmentStart(readStart);
            read.setMappingQuality(artificialMappingQuality);
            read.setReferenceName(loc.getContig());
            read.setReadNegativeStrandFlag(false);
            read.setReadGroup(sampleRG(sample));

            // Wrap the read in a pileup element positioned at readOffset.
            pileupElements.add(LocusIteratorByState.createPileupForReadAndOffset(read, readOffset));
        }

        return pileupElements;
View Full Code Here

        // Walk every read's per-allele likelihoods, keeping only reads with an
        // informative most-likely allele.
        for ( final Map.Entry<GATKSAMRecord, Map<Allele,Double>> el : likelihoodMap.getLikelihoodReadMap().entrySet() ) {
            final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue());
            if ( ! a.isInformative() )
                continue; // read is non-informative

            final GATKSAMRecord read = el.getKey();
            // Only usable reads at the reference locus contribute; getElementForRead
            // may still decline (null) for a particular read.
            if ( isUsableRead(read, refLoc) ) {
                final Double value = getElementForRead(read, refLoc, a);
                if ( value == null )
                    continue;
View Full Code Here

        int nextIndex = 0;
        for (final PileupElement p : pileup) {
            // Deletions get a fixed model quality; other elements use their own base qual.
            final byte qual = p.isDeletion() ? REF_MODEL_DELETION_QUAL : p.getQual();
            // Skip low-quality non-deletion bases entirely.
            if (!p.isDeletion() && qual <= minBaseQual)
                continue;
            final GATKSAMRecord read = p.getRead();
            reads.add(read);
            // An element counts as "alt" if it mismatches the reference base or sits
            // next to any indel or soft-clip evidence in the read.
            final boolean isAlt = p.getBase() != refBase || p.isDeletion() || p.isBeforeDeletionStart()
                    || p.isAfterDeletionEnd() || p.isBeforeInsertion() || p.isAfterInsertion() || p.isNextToSoftClip();
            final int bestAllele;
            final int worstAllele;
View Full Code Here

     * @return an integer >= 0
     */
    // Counts pileup elements whose read is informative about indels of up to
    // maxIndelSize at the given reference offset, capped at
    // MAX_N_INDEL_INFORMATIVE_READS.
    protected final int calcNIndelInformativeReads(final ReadBackedPileup pileup, final int pileupOffsetIntoRef, final byte[] ref, final int maxIndelSize) {
        int nInformative = 0;
        for ( final PileupElement p : pileup ) {
            final GATKSAMRecord read = p.getRead();
            final int offset = p.getOffset();

            // doesn't count as evidence
            if ( p.isBeforeDeletionStart() || p.isBeforeInsertion() || p.isDeletion() )
                continue;

            // todo -- this code really should handle CIGARs directly instead of relying on the above tests
            if ( isReadInformativeAboutIndelsOfSize(read.getReadBases(), read.getBaseQualities(), offset, ref, pileupOffsetIntoRef, maxIndelSize) ) {
                nInformative++;
                // Cap early: callers only need to know "at least MAX" informative reads.
                if( nInformative > MAX_N_INDEL_INFORMATIVE_READS ) {
                    return MAX_N_INDEL_INFORMATIVE_READS;
                }
            }
View Full Code Here

TOP

Related Classes of org.broadinstitute.gatk.utils.sam.GATKSAMRecord

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact: coftware#gmail.com.