Skip to content

Commit

Permalink
Upgrade htsjdk to v3.0.1 and picard to 2.27.5 (#8025)
Browse files Browse the repository at this point in the history
This upgrade of htsjdk was attempted in #7867 and then reverted in #7960
in order to unblock the jukebox merge.

* upgrade htsjdk 2.24.1 -> 3.0.1
* upgrade picard 2.27.1 -> 2.27.5

Co-authored-by: David Roazen <[email protected]>
  • Loading branch information
lbergelson and droazen authored Oct 12, 2022
1 parent 19778c1 commit 8dbb78f
Show file tree
Hide file tree
Showing 9 changed files with 26 additions and 24 deletions.
4 changes: 2 additions & 2 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,8 @@ repositories {
mavenLocal()
}

final htsjdkVersion = System.getProperty('htsjdk.version','2.24.1')
final picardVersion = System.getProperty('picard.version','2.27.1')
final htsjdkVersion = System.getProperty('htsjdk.version','3.0.1')
final picardVersion = System.getProperty('picard.version','2.27.5')
final barclayVersion = System.getProperty('barclay.version','4.0.2')
final sparkVersion = System.getProperty('spark.version', '2.4.5')
final scalaVersion = System.getProperty('scala.version', '2.11')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import de.javakaffee.kryoserializers.guava.ImmutableMapSerializer;
import htsjdk.samtools.*;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.SimpleAllele;
import htsjdk.variant.vcf.VCFHeaderLineType;
import htsjdk.variant.vcf.VCFInfoHeaderLine;
import org.apache.spark.serializer.KryoRegistrator;
Expand Down Expand Up @@ -65,13 +66,13 @@ public VCFInfoHeaderLine newInstance() {
return new VCFInfoHeaderLine("TMP", 2, VCFHeaderLineType.String, "");
}
});
registration = kryo.register(Allele.class);
registration = kryo.register(SimpleAllele.class);
registration.setInstantiator(new ObjectInstantiator<Allele>() {
public Allele newInstance() {
return Allele.create("TCGA");
}
});
}
}

@Override
public void registerClasses(Kryo kryo) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
package org.broadinstitute.hellbender.tools.funcotator;

import htsjdk.tribble.annotation.Strand;
import htsjdk.variant.variantcontext.SimpleAllele;

/**
* Class to represent a strand-corrected {@link htsjdk.variant.variantcontext.Allele}.
* Created by jonn on 10/24/18.
*/
public class StrandCorrectedAllele extends htsjdk.variant.variantcontext.Allele {
public class StrandCorrectedAllele extends SimpleAllele {
public static final long serialVersionUID = 1L;

/**
Expand Down
Original file line number Diff line number Diff line change
@@ -1,19 +1,21 @@
package org.broadinstitute.hellbender.tools.walkers.haplotypecaller;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.SimpleAllele;

/**
* This class is similar to {@link org.broadinstitute.hellbender.tools.walkers.haplotypecaller.LocationAndAlleles} but
allows keeping only an allele/ref pair rather than a list of alleles. The comparison is done on an allele-by-allele basis,
not in the way it is done in LocationAndAlleles
*/

public class AlleleAndContext extends Allele {
public class AlleleAndContext extends SimpleAllele {
final static public long serialVersionUID = 1L;
private final int loc;
private final String contig;
private final Allele refAllele;

public AlleleAndContext(final String contig, final int loc, final Allele allele, final Allele refAllele) {
super(allele, false);
super(allele.getBases(), allele.isReference());
this.loc = loc;
this.contig = contig;
this.refAllele = refAllele;
Expand Down Expand Up @@ -42,7 +44,7 @@ public boolean equals(final Object o) {

@Override
public int hashCode() {
return 31 * loc + (this != null ? super.hashCode() : 0);
return 31 * loc + super.hashCode();
}

public String toString() {return String.format("(%d) %s/%s", loc, getBaseString(), getRefAllele().getBaseString());}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -415,11 +415,11 @@ public static AssemblyResultSet assembleReads(final AssemblyRegion region,
* Handle pileup detected alternate alleles.
*/
@VisibleForTesting
@SuppressWarnings("deprecation")
static void processPileupAlleles(final AssemblyRegion region, final List<VariantContext> givenAlleles, final int maxMnpDistance,
final int snpAdjacentToIndelLimit, final SmithWatermanAligner aligner, final Haplotype refHaplotype,
final AssemblyResultSet assemblyResultSet, final int numHaplotypesPerIteration, final int hapFilteringKmerSize,
final SWParameters haplotypeToReferenceSWParameters) {
final int assemblyRegionStart = region.getPaddedSpan().getStart();
final int activeRegionStart = refHaplotype.getAlignmentStartHapwrtRef();
final Map<Integer, VariantContext> assembledVariants = assemblyResultSet.getVariationEvents(maxMnpDistance).stream()
.collect(Collectors.groupingBy(VariantContext::getStart, Collectors.collectingAndThen(Collectors.toList(), AssemblyBasedCallerUtils::makeMergedVariantContext)));
Expand Down Expand Up @@ -496,10 +496,10 @@ public static void addGivenAlleles(final List<VariantContext> givenAlleles, fina
final Allele longerRef = (assembledVC == null || givenVCRefLength > assembledVC.getReference().length()) ? givenVC.getReference() : assembledVC.getReference();
final List<Allele> unassembledGivenAlleles = getAllelesNotPresentInAssembly(givenVC, assembledVC, givenVCRefLength, longerRef);

final List<Allele> unassembledNonSymbolicAlleles = unassembledGivenAlleles.stream().filter(a -> {
final byte[] bases = a.getBases();
return !(Allele.wouldBeNoCallAllele(bases) || Allele.wouldBeNullAllele(bases) || Allele.wouldBeStarAllele(bases) || Allele.wouldBeSymbolicAllele(bases));
}).collect(Collectors.toList());
final List<Allele> unassembledNonSymbolicAlleles = unassembledGivenAlleles.stream()
//TODO, update the null allele check when htsjdk adds a NULL_ALLELE constant to Allele
.filter(a -> !(a.equals(Allele.NO_CALL) || a.getDisplayString().equals(String.valueOf(VCFConstants.NULL_ALLELE)) || a.equals(Allele.SPAN_DEL) || a.isSymbolic()))
.collect(Collectors.toList());

// choose the highest-scoring haplotypes along with the reference for building force-calling haplotypes
final List<Haplotype> baseHaplotypes = unassembledNonSymbolicAlleles.isEmpty() ? Collections.emptyList() : assembledHaplotypes.stream()
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package org.broadinstitute.hellbender.tools.walkers.haplotypecaller.graphs;

import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.SimpleAllele;

/**
* Utility class for defining a "not" allele concept that is used to score haplotypes that do not support the allele.
Expand All @@ -10,15 +11,14 @@
* @author Ilya Soifer &lt;[email protected]&gt;
*/

public class InverseAllele extends Allele {
public class InverseAllele extends SimpleAllele {
final static public long serialVersionUID = 1L;

private final Allele internalAllele;
private final boolean referenceStatus;

private InverseAllele(final Allele allele, boolean isReference) {
super(allele, false);
super(allele.getBases(), isReference);
this.internalAllele = allele;
referenceStatus = isReference;
}

// InverseAllele of inverseAllele. By definition it is the allele. In Allele filtering code we normally genotype
Expand All @@ -35,10 +35,6 @@ public static Allele of(final Allele allele, boolean refFlag){
public byte [] getBases(){
return getDisplayString().getBytes();
}
@Override
public boolean isReference() {
return referenceStatus;
}

@Override
public boolean isSymbolic() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import htsjdk.samtools.Cigar;
import htsjdk.samtools.util.Locatable;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.SimpleAllele;
import org.apache.commons.lang3.ArrayUtils;
import org.broadinstitute.hellbender.utils.read.FlowBasedKeyCodec;
import org.broadinstitute.hellbender.utils.read.FlowBasedReadUtils;
Expand All @@ -11,7 +12,7 @@
* Haplotype that also keeps information on the flow space @see FlowBasedRead
* Haplotype can't be extended, so this extends SimpleAllele
*/
public class FlowBasedHaplotype extends Allele {
public class FlowBasedHaplotype extends SimpleAllele {
private static final long serialVersionUID = 42L;
private int [] key;
private int [] rKey;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import htsjdk.samtools.CigarOperator;
import htsjdk.samtools.util.Locatable;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.SimpleAllele;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.broadinstitute.hellbender.utils.SimpleInterval;
Expand All @@ -17,7 +18,7 @@
import java.util.Arrays;
import java.util.Comparator;

public final class Haplotype extends Allele {
public final class Haplotype extends SimpleAllele {
private static final long serialVersionUID = 1L;

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ public File getOutputDir() {
}

private void setOutputDir() {
this.outputDir = IOUtil.createTempDir(this.getClass().getSimpleName() + ".", ".tmp");
this.outputDir = IOUtil.createTempDir(this.getClass().getSimpleName() + ".tmp").toFile();
if (deleteOnExit) {
outputDir.deleteOnExit();
}
Expand Down

0 comments on commit 8dbb78f

Please sign in to comment.