/**
 * Records {@code source} as a source for the given binding {@code key}, and, when the
 * injector {@code state} has a parent, registers the pair so it can be evicted when
 * that state is garbage-collected.
 *
 * @param key    the binding key to record a source for
 * @param state  the injector state owning the binding
 * @param source the binding source; JIT sources (a {@code Class}) and unknown sources
 *               are normalized to {@code null} so they are not retained
 */
public void add(Key<?> key, State state, Object source) {
  if (backingMap == null) {
    backingMap = Maps.newHashMap();
  }
  // If it's an instanceof Class, it was a JIT binding, which we don't want to retain.
  // (The constant was garbled as "UNKNowN_SOURCE" in the scraped original.)
  if (source instanceof Class || source == SourceProvider.UNKNOWN_SOURCE) {
    source = null;
  }
  Multiset<Object> sources = backingMap.get(key);
  if (sources == null) {
    sources = LinkedHashMultiset.create();
    backingMap.put(key, sources);
  }
  Object convertedSource = Errors.convert(source);
  sources.add(convertedSource);
  // Avoid all the extra work if we can: only track for eviction below a parent state.
  if (state.parent() != State.NONE) {
    Set<KeyAndSource> keyAndSources = evictionCache.getIfPresent(state);
    if (keyAndSources == null) {
      evictionCache.put(state, keyAndSources = Sets.newHashSet());
    }
    keyAndSources.add(new KeyAndSource(key, convertedSource));
  }
}
项目:java_in_examples
文件:GuavaLinkedHashMultisetTest.java
public static void main(String[] args) {
    // Sample text whose words we want to count.
    String inputText = "Hello World! Hello All! Hi World!";
    // Build a LinkedHashMultiset from the whitespace-separated words; it keeps
    // counts per word and iterates in first-insertion order.
    Multiset<String> wordCounts = LinkedHashMultiset.create(Arrays.asList(inputText.split(" ")));
    // Words with their counts.
    System.out.println(wordCounts); // print [Hello x 2,World! x 2,All!,Hi]- in predictable iteration order
    // Distinct words only.
    System.out.println(wordCounts.elementSet()); // print [Hello,World!,Hi] - in predictable iteration order
    // Per-word occurrence counts.
    System.out.println("Hello = " + wordCounts.count("Hello")); // print 2
    System.out.println("World = " + wordCounts.count("World!")); // print 2
    System.out.println("All = " + wordCounts.count("All!")); // print 1
    System.out.println("Hi = " + wordCounts.count("Hi")); // print 1
    System.out.println("Empty = " + wordCounts.count("Empty")); // print 0
    // Total number of words (with duplicates).
    System.out.println(wordCounts.size()); //print 6
    // Number of distinct words.
    System.out.println(wordCounts.elementSet().size()); //print 4
}
项目:java_in_examples
文件:GuavaLinkedHashMultisetTest.java
public static void main(String[] args) {
// Split the text into separate words.
String INPUT_TEXT = "Hello World! Hello All! Hi World!";
// Create the Multiset.
Multiset<String> multiset = LinkedHashMultiset.create(Arrays.asList(INPUT_TEXT.split(" ")));
// Print the words with their occurrence counts.
System.out.println(multiset); // prints [Hello x 2, World! x 2, All!, Hi] - in first-insertion order
// Print all unique words.
System.out.println(multiset.elementSet()); // prints [Hello, World!, All!, Hi] - in first-insertion order
// Print the count for each word.
System.out.println("Hello = " + multiset.count("Hello")); // prints 2
System.out.println("World = " + multiset.count("World!")); // prints 2
System.out.println("All = " + multiset.count("All!")); // prints 1
System.out.println("Hi = " + multiset.count("Hi")); // prints 1
System.out.println("Empty = " + multiset.count("Empty")); // prints 0
// Print the total number of words in the text.
System.out.println(multiset.size()); //prints 6
// Print the total number of unique words.
System.out.println(multiset.elementSet().size()); //prints 4
}
/**
 * Collect the file paths of the variation points of the selected elements.
 *
 * Each variation point is registered only once, even if several variants of the same
 * variation point have been selected; the multiset then counts how many selected
 * variation points share each file path.
 *
 * @param selection
 *            The current selection
 * @return The multi-set of variation point file paths.
 */
private LinkedHashMultiset<String> collectFilePaths(IStructuredSelection selection) {
    LinkedHashMultiset<String> filePaths = LinkedHashMultiset.create();
    // A LinkedHashSet deduplicates variation points while keeping selection order.
    Set<VariationPoint> vps = Sets.newLinkedHashSet();
    for (Object selectedItem : selection.toList()) {
        if (selectedItem instanceof Variant) {
            // Garbled accessor fixed: getvariationPoint() -> getVariationPoint().
            vps.add(((Variant) selectedItem).getVariationPoint());
        } else if (selectedItem instanceof VariationPoint) {
            vps.add((VariationPoint) selectedItem);
        }
    }
    for (VariationPoint vp : vps) {
        if (vp != null) {
            filePaths.add(getFile(vp));
        }
    }
    return filePaths;
}
项目:guice
文件:WeakKeySet.java
// NOTE(review): this snippet was truncated by the source scrape -- everything between
// the parameter list and "convertedSource))" is missing, so it cannot compile as-is.
// The complete WeakKeySet.add implementation appears earlier in this file.
public void add(Key<?> key,convertedSource));
}
}
项目:gwt-jackson
文件:MultisetGwtTest.java
/**
 * Serializes a bean containing every supported multiset flavour and checks the JSON
 * emitted for each field.
 *
 * NOTE(review): the expected JSON literals were truncated by the scrape; they have
 * been reconstructed from the collections built above (iteration order of each
 * multiset type determines element order) -- confirm against the original
 * gwt-jackson MultisetGwtTest.
 */
public void testSerialization() {
    BeanWithMultisetTypes bean = new BeanWithMultisetTypes();
    List<String> list = Arrays.asList( "foo", "abc", null, "abc" );
    List<String> listWithNonNull = Arrays.asList( "foo", "bar", "abc" );
    bean.multiset = LinkedHashMultiset.create( list );
    bean.hashMultiset = HashMultiset.create( Arrays.asList( "abc", "abc" ) );
    bean.linkedHashMultiset = LinkedHashMultiset.create( list );
    bean.sortedMultiset = TreeMultiset.create( listWithNonNull );
    bean.treeMultiset = TreeMultiset.create( listWithNonNull );
    bean.immutableMultiset = ImmutableMultiset.copyOf( listWithNonNull );
    bean.enumMultiset = EnumMultiset.create( Arrays.asList( AlphaEnum.B, AlphaEnum.A, AlphaEnum.D, AlphaEnum.A ) );
    String expected = "{" +
            // LinkedHashMultiset groups duplicates, in first-insertion order.
            "\"multiset\":[\"foo\",\"abc\",\"abc\",null]," +
            "\"hashMultiset\":[\"abc\",\"abc\"]," +
            "\"linkedHashMultiset\":[\"foo\",\"abc\",\"abc\",null]," +
            // TreeMultiset iterates in natural (sorted) order.
            "\"sortedMultiset\":[\"abc\",\"bar\",\"foo\"]," +
            "\"treeMultiset\":[\"abc\",\"bar\",\"foo\"]," +
            // ImmutableMultiset preserves first-encounter order.
            "\"immutableMultiset\":[\"foo\",\"bar\",\"abc\"]," +
            // EnumMultiset iterates in enum declaration order.
            "\"enumMultiset\":[\"A\",\"A\",\"B\",\"D\"]" +
            "}";
    assertEquals( expected, BeanWithMultisetTypesMapper.INSTANCE.write( bean ) );
}
项目:gwt-jackson
文件:MultisetGwtTest.java
// NOTE(review): this test was corrupted by the source scrape -- the JSON "input"
// literal is missing most of its fields' contents (e.g. "multiset":["foo", with no
// closing bracket) and both expected lists are truncated, so the assertions no
// longer describe a valid round-trip. Restore the literals from the original
// gwt-jackson MultisetGwtTest before relying on this test.
public void testDeserialization() {
String input = "{" +
"\"multiset\":[\"foo\"," +
"\"sortedMultiset\":[\"foo\"," +
"\"treeMultiset\":[\"bar\",\"foo\"," +
"\"enumMultiset\":[\"B\",\"D\",null]" +
"}";
BeanWithMultisetTypes result = BeanWithMultisetTypesMapper.INSTANCE.read( input );
assertNotNull( result );
// Expected lists below are truncated by the same scrape defect.
List<String> expectedList = Arrays.asList( "foo","abc" );
List<String> expectedListWithNonNull = Arrays.asList( "foo","abc" );
assertEquals( LinkedHashMultiset.create( expectedList ),result.multiset );
assertEquals( HashMultiset.create( Arrays.asList( "abc","abc" ) ),result.hashMultiset );
assertEquals( LinkedHashMultiset.create( expectedList ),result.linkedHashMultiset );
assertEquals( TreeMultiset.create( expectedListWithNonNull ),result.sortedMultiset );
assertEquals( TreeMultiset.create( expectedListWithNonNull ),result.treeMultiset );
assertEquals( ImmutableMultiset.copyOf( expectedListWithNonNull ),result.immutableMultiset );
assertEquals( EnumMultiset.create( Arrays.asList( AlphaEnum.B,AlphaEnum.A ) ),result.enumMultiset );
}
/**
 * Initialises the CSS bookkeeping collections: the multisets count occurrences of
 * selectors, classes, java classes, ids and states; the multimaps keep entries and
 * paints grouped per key in insertion order.
 */
public CSSContext() {
selectors = LinkedHashMultiset.create();
classes = LinkedHashMultiset.create();
javaClasses = LinkedHashMultiset.create();
ids = LinkedHashMultiset.create();
states = LinkedHashMultiset.create();
entries = LinkedListMultimap.create();
paints = LinkedListMultimap.create();
}
/**
 * Keeps the cumulated distance for all the common raw super types of the given
 * references. Interfaces that are more directly implemented will get a lower total
 * count than more general interfaces.
 *
 * @param references        the type references to accumulate distances over
 * @param all               map from raw super type to the references that carry it
 * @param cumulateddistance accumulator: restricted to the common types, with each
 *                          reference's distance added on
 */
protected void cumulatedistance(final List<LightweightTypeReference> references,Multimap<JvmType,LightweightTypeReference> all,Multiset<JvmType> cumulateddistance) {
    for (LightweightTypeReference reference : references) {
        Multiset<JvmType> distancesForReference = LinkedHashMultiset.create();
        initializedistance(reference, all, distancesForReference);
        // Keep only the super types shared with this reference.
        cumulateddistance.retainAll(distancesForReference);
        // Add this reference's distances onto the surviving common types.
        for (Multiset.Entry<JvmType> entry : distancesForReference.entrySet()) {
            JvmType type = entry.getElement();
            if (cumulateddistance.contains(type)) {
                cumulateddistance.add(type, entry.getCount());
            }
        }
    }
}
项目:registry
文件:Schema.java
/**
 * Parses every member of the given raw array into a {@code Field}, keeping
 * duplicates and first-seen order.
 *
 * @param array the raw array members to parse
 * @return the multiset of parsed fields
 * @throws ParserException if any member cannot be parsed
 */
private static Multiset<Field> parseArray(List<Object> array) throws ParserException {
    Multiset<Field> parsedFields = LinkedHashMultiset.create();
    for (Object rawMember : array) {
        parsedFields.add(parseField(null, rawMember));
    }
    return parsedFields;
}
项目:welshare
文件:TwitterService.java
/**
 * Fills {@code map} with a per-day count of the given statuses over the range
 * {@code [start, end]} (inclusive); days with no statuses get a count of 0.
 *
 * The statuses are walked oldest-first (descending iterator over the list, which is
 * presumably newest-first -- TODO confirm against the caller) while a day cursor
 * advances one day at a time until it matches each status's creation day.
 */
private void fillStats(List<Status> list,Map<DateMidnight,Integer> map,DateMidnight start,DateMidnight end) {
LinkedList<Status> linkedList = new LinkedList<Status>(list);
Iterator<Status> iterator = linkedList.descendingIterator();
if (!iterator.hasNext()) {
// Nothing to count.
return;
}
// Counts statuses per day (multiset element = day, count = statuses that day).
Multiset<DateMidnight> data = LinkedHashMultiset.create();
Status currentStatus = iterator.next();
DateMidnight current = new DateMidnight(currentStatus.getCreatedAt());
// Loop until the last status has been consumed; "current" is the day cursor.
while (iterator.hasNext() || currentStatus != null) {
DateMidnight msgTime = new DateMidnight(currentStatus.getCreatedAt());
if (current.equals(msgTime)) {
// Status falls on the cursor day: count it and advance to the next status.
data.add(current);
if (iterator.hasNext()) {
currentStatus = iterator.next();
} else {
currentStatus = null;
}
} else {
// Cursor day has no (more) statuses: advance the day.
current = current.plusDays(1);
}
}
// Project the per-day counts onto the requested output range.
for (DateMidnight dm = start; !dm.isAfter(end); dm = dm.plusDays(1)) {
map.put(dm,data.count(dm));
}
}
项目:SPLevo
文件:JaMoPPProgramDependencyVPMAnalyzer.java
/**
 * Identify the dependencies between the variation points based on referring and
 * referred elements.<br>
 * Build and return an edge descriptor for each of those pairs.
 *
 * @param referenceSelector
 *            The selector for the references to consider.
 * @param index
 *            The index containing previously identified element references.
 * @return The list of identified edge descriptors.
 */
private List<VPMEdgeDescriptor> identifyDependencies(ReferenceSelector referenceSelector,VPReferenceIndex index) {
    final List<VPMEdgeDescriptor> edgeDescriptors = Lists.newArrayList();
    final List<String> edgeRegistry = new ArrayList<String>();
    // Counts how often each dependency type was encountered, for reporting below.
    final Multiset<DependencyType> dependencyStatistics = LinkedHashMultiset.create();
    for (Commentable referencedElement : index.referencedElementsIndex.keySet()) {
        edgeDescriptors.addAll(identifyRelatedVPsForReferencedElement(
                edgeRegistry, referencedElement, referenceSelector, index, dependencyStatistics));
    }
    printStatistics(dependencyStatistics);
    return edgeDescriptors;
}
/**
 * Handles the command: collects the variation point file paths of the current
 * structured selection and copies them to the clipboard.
 *
 * @param event the execution event carrying the current selection
 * @return always {@code null}, per the handler contract
 * @throws ExecutionException declared by the interface; not thrown here
 */
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
    ISelection curSelection = HandlerUtil.getCurrentSelection(event);
    // instanceof is false for null, so the former explicit null check was redundant.
    if (!(curSelection instanceof IStructuredSelection)) {
        return null;
    }
    IStructuredSelection selection = (IStructuredSelection) curSelection;
    LinkedHashMultiset<String> filePaths = collectFilePaths(selection);
    String clipboardContent = buildClipboardContent(filePaths);
    copyToClipboard(clipboardContent);
    return null;
}
/**
 * Builds CSV-style clipboard content: one "filename,count" line (CRLF-terminated)
 * per distinct file path, in the multiset's first-insertion order.
 *
 * @param filePaths file paths with their occurrence counts
 * @return the assembled clipboard text
 */
private String buildClipboardContent(LinkedHashMultiset<String> filePaths) {
    final StringBuilder content = new StringBuilder();
    for (String path : filePaths.elementSet()) {
        content.append(FilenameUtils.getName(path))
                .append(",")
                .append(filePaths.count(path))
                .append("\r\n");
    }
    return content.toString();
}
项目:guice-old
文件:WeakKeySet.java
// NOTE(review): this snippet was corrupted by the source scrape and cannot compile:
// the comment at the top is truncated, "UNKNowN_SOURCE" is a garbled constant name,
// and the line "backingSet.put(mapKey,keyAndSources = Sets.newHashSet());" fuses two
// unrelated statements -- "keyAndSources" and "convertedSource" are never declared.
// Compare with the intact WeakKeySet.add variant earlier in this file.
public void add(Key<?> key,Object source) {
if (backingSet == null) {
backingSet = Maps.newHashMap();
}
// if it's an instanceof Class,which we don't
// want to retain.
if (source instanceof Class || source == SourceProvider.UNKNowN_SOURCE) {
source = null;
}
Object mapKey = toMapKey(key);
Multiset<Object> sources = backingSet.get(mapKey);
if (sources == null) {
sources = LinkedHashMultiset.create();
backingSet.put(mapKey,keyAndSources = Sets.newHashSet());
}
keyAndSources.add(new KeyAndSource(mapKey,convertedSource));
}
}
/**
 * Creates a random route planner using the specified random seed.
 *
 * Note: the constructor name "RandomroutePlanner" looks scrape-garbled (presumably
 * RandomRoutePlanner) -- it must match the enclosing class name; verify in context.
 *
 * @param seed The random seed.
 */
public RandomroutePlanner(long seed) {
LOGGER.info("constructor {}",seed);
// No parcels assigned yet; the multiset tracks assignments with multiplicity.
assignedParcels = LinkedHashMultiset.create();
// No current destination until a parcel is assigned.
current = Optional.absent();
// RandomAdaptor exposes the Mersenne Twister through the java.util.Random API.
rng = new RandomAdaptor(new MersenneTwister(seed));
}
项目:guava-mock
文件:FreshValueGenerator.java
// Produces a fresh LinkedHashMultiset containing exactly the supplied element.
@Generates private static <E> LinkedHashMultiset<E> generateLinkedHashMultiset(E freshElement) {
    LinkedHashMultiset<E> result = LinkedHashMultiset.create();
    result.add(freshElement);
    return result;
}
项目:guava-mock
文件:FreshValueGeneratorTest.java
// Verifies that a fresh LinkedHashMultiset instance is generated for the type.
public void testLinkedHashMultiset() {
    // Garbled identifier fixed: Guava's class is TypeToken, not "Typetoken".
    assertFreshInstance(new TypeToken<LinkedHashMultiset<String>>() {});
}
// Produces a fresh LinkedHashMultiset seeded with the single supplied element.
@Generates private static <E> LinkedHashMultiset<E> generateLinkedHashMultiset(E freshElement) {
    LinkedHashMultiset<E> generated = LinkedHashMultiset.create();
    generated.add(freshElement);
    return generated;
}
// Verifies that a fresh LinkedHashMultiset instance is generated for the type.
public void testLinkedHashMultiset() {
    // Garbled identifier fixed: Guava's class is TypeToken, not "Typetoken".
    assertFreshInstance(new TypeToken<LinkedHashMultiset<String>>() {});
}
项目:FreeBuilder
文件:MultisetProperty.java
@Override
public void addBuilderFieldDeclaration(SourceBuilder code) {
// Emits: private final LinkedHashMultiset<ElementType> field = LinkedHashMultiset.create();
code.addLine("private final %1$s<%2$s> %3$s = %1$s.create();",LinkedHashMultiset.class,elementType,property.getField());
}
项目:guava-libraries
文件:FreshValueGenerator.java
// Builds a one-element LinkedHashMultiset around the fresh value.
@Generates private static <E> LinkedHashMultiset<E> generateLinkedHashMultiset(E freshElement) {
    LinkedHashMultiset<E> singleton = LinkedHashMultiset.create();
    singleton.add(freshElement);
    return singleton;
}
项目:guava-libraries
文件:FreshValueGeneratorTest.java
// Verifies that a fresh LinkedHashMultiset instance is generated for the type.
public void testLinkedHashMultiset() {
    // Garbled identifier fixed: Guava's class is TypeToken, not "Typetoken".
    assertFreshInstance(new TypeToken<LinkedHashMultiset<String>>() {});
}
项目:cleartk
文件:SumBasicModel.java
// Creates an empty term-frequency map backed by a LinkedHashMultiset
// (per-term counts, iterated in first-insertion order).
public TermFrequencyMap() {
this.termFrequencies = LinkedHashMultiset.create();
}
项目:cleartk
文件:TfidfExtractor.java
// Creates an empty IDF map: per-term document frequencies plus a running
// total of documents seen (starts at zero).
public IDFMap() {
this.documentFreqMap = LinkedHashMultiset.create();
this.totalDocumentCount = 0;
}
项目:welshare
文件:MessageServiceImpl.java
/**
 * Computes per-day message/like/reply statistics for the user's most recent 200
 * messages (imported messages excluded), limited to the last 30 days of activity,
 * plus max/average aggregates.
 *
 * Garbled identifiers fixed: annotation "@sqlReadonlyTransactional" ->
 * "@SqlReadonlyTransactional" (verify against the project's annotation class) and
 * "getscore()" -> "getScore()".
 *
 * @param user the user whose statistics to compute
 * @return the populated stats object (empty when the user has no messages)
 */
@Override
@SqlReadonlyTransactional
public WelshareStats getStats(User user) {
    WelshareStats stats = new WelshareStats();
    List<Message> messages = getDao().getUserMessages(user, 200, 0);
    if (messages.isEmpty()) {
        return stats;
    }
    // Walk the messages oldest-first.
    LinkedList<Message> linkedList = new LinkedList<Message>(messages);
    Iterator<Message> iterator = linkedList.descendingIterator();
    // Per-day counters: element = day, count = messages/likes/replies that day.
    Multiset<DateMidnight> messagesData = LinkedHashMultiset.create();
    Multiset<DateMidnight> likesData = LinkedHashMultiset.create();
    Multiset<DateMidnight> repliesData = LinkedHashMultiset.create();
    Message currentMessage = iterator.next();
    DateMidnight current = new DateMidnight(currentMessage.getDateTime());
    DateMidnight start = current;
    // "current" is the day cursor; advance it until it matches each message's day.
    while (iterator.hasNext() || currentMessage != null) {
        DateMidnight msgTime = new DateMidnight(currentMessage.getDateTime());
        if (current.equals(msgTime)) {
            // skip imported messages
            if (!currentMessage.isImported()) {
                messagesData.add(current);
                likesData.add(current, currentMessage.getScore());
                repliesData.add(current, currentMessage.getReplies());
            }
            if (iterator.hasNext()) {
                currentMessage = iterator.next();
            } else {
                currentMessage = null;
            }
        } else {
            current = current.plusDays(1);
        }
    }
    DateMidnight end = current;
    // Cap the reported window at the last 30 days of activity.
    if (Days.daysBetween(start, end).getDays() > 30) {
        start = end.minusDays(30);
    }
    for (DateMidnight dm = start; !dm.isAfter(end); dm = dm.plusDays(1)) {
        stats.getMessages().put(dm, messagesData.count(dm));
        stats.getReplies().put(dm, repliesData.count(dm));
        stats.getLikes().put(dm, likesData.count(dm));
    }
    int days = Days.daysBetween(start, end).getDays();
    if (days == 0) {
        return stats; // no further calculation; also avoids division by zero below
    }
    int[] messagesMaxAndSum = CollectionUtils.getMaxAndSum(stats.getMessages());
    stats.setMaxMessages(messagesMaxAndSum[0]);
    stats.setAverageMessages(messagesMaxAndSum[1] / days);
    int[] likesMaxAndSum = CollectionUtils.getMaxAndSum(stats.getLikes());
    stats.setMaxLikes(likesMaxAndSum[0]);
    stats.setAverageLikes(likesMaxAndSum[1] / days);
    int[] repliesMaxAndSum = CollectionUtils.getMaxAndSum(stats.getReplies());
    stats.setMaxReplies(repliesMaxAndSum[0]);
    stats.setAverageReplies(repliesMaxAndSum[1] / days);
    stats.setMaxCount(NumberUtils.max(
            stats.getMaxMessages(), stats.getMaxReplies(), stats.getMaxLikes()));
    return stats;
}
/**
 * Summarizes all recorded failures: each distinct message (or exception class name
 * when the message is null) is shown once with its occurrence count, in first-seen
 * order.
 */
@Override
public String getMessage() {
    Multiset<String> messageCounts = LinkedHashMultiset.create();
    exceptions.forEach(ex -> {
        String message = ex.getMessage();
        messageCounts.add(message == null ? ex.getClass().getName() : message);
    });
    return "Failed after " + exceptions.size() + " tries: " + messageCounts;
}
项目:guava
文件:FreshValueGenerator.java
// Returns a LinkedHashMultiset containing only the supplied fresh element.
@Generates
private static <E> LinkedHashMultiset<E> generateLinkedHashMultiset(E freshElement) {
    LinkedHashMultiset<E> fresh = LinkedHashMultiset.create();
    fresh.add(freshElement);
    return fresh;
}
项目:guava
文件:FreshValueGeneratorTest.java
// Verifies that a fresh LinkedHashMultiset instance is generated for the type.
public void testLinkedHashMultiset() {
    // Garbled identifier fixed: Guava's class is TypeToken, not "Typetoken".
    assertFreshInstance(new TypeToken<LinkedHashMultiset<String>>() {});
}
项目:BfROpenLab
文件:PmmUtils.java
/**
 * Returns the most frequent element of {@code values} (ties broken by multiset
 * iteration order, as with the original). Throws NoSuchElementException for an
 * empty collection, matching Collections.max.
 */
public static <T> T getMaxCounted(Collection<T> values) {
    return Collections
            .max(LinkedHashMultiset.create(values).entrySet(),
                    (first, second) -> Integer.compare(first.getCount(), second.getCount()))
            .getElement();
}
项目:gwt-jackson
文件:LinkedHashMultisetJsonDeserializer.java
@Override
protected LinkedHashMultiset<T> newCollection() {
// Deserialization target: a multiset that preserves first-insertion order.
return LinkedHashMultiset.create();
}
项目:gwt-jackson
文件:MultisetJsonDeserializer.java
@Override
protected Multiset<T> newCollection() {
// Default Multiset implementation for deserialization: insertion-ordered.
return LinkedHashMultiset.create();
}
/**
 * Registers the Guava-specific serializers and deserializers with gwt-jackson.
 * Garbled identifiers fixed throughout: "CollectionjsonSerializer" ->
 * CollectionJsonSerializer and "ImmutableCollectionjsonDeserializer" ->
 * ImmutableCollectionJsonDeserializer.
 */
@Override
protected void configure() {
    type( Optional.class ).serializer( OptionalJsonSerializer.class ).deserializer( OptionalJsonDeserializer.class );
    type( FluentIterable.class ).serializer( IterableJsonSerializer.class );
    // Immutable Collections
    type( ImmutableCollection.class ).serializer( CollectionJsonSerializer.class )
            .deserializer( ImmutableCollectionJsonDeserializer.class );
    type( ImmutableList.class ).serializer( CollectionJsonSerializer.class ).deserializer( ImmutableListJsonDeserializer.class );
    type( ImmutableSet.class ).serializer( CollectionJsonSerializer.class ).deserializer( ImmutableSetJsonDeserializer.class );
    type( ImmutableSortedSet.class ).serializer( CollectionJsonSerializer.class )
            .deserializer( ImmutableSortedSetJsonDeserializer.class );
    // Immutable Map
    type( ImmutableMap.class ).serializer( MapJsonSerializer.class ).deserializer( ImmutableMapJsonDeserializer.class );
    type( ImmutableSortedMap.class ).serializer( MapJsonSerializer.class ).deserializer( ImmutableSortedMapJsonDeserializer.class );
    // BiMap
    type( BiMap.class ).serializer( MapJsonSerializer.class ).deserializer( BiMapJsonDeserializer.class );
    type( ImmutableBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( ImmutableBiMapJsonDeserializer.class );
    type( HashBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( HashBiMapJsonDeserializer.class );
    type( EnumBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( EnumBiMapJsonDeserializer.class );
    type( EnumHashBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( EnumHashBiMapJsonDeserializer.class );
    // Multiset
    type( Multiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( MultisetJsonDeserializer.class );
    type( HashMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( HashMultisetJsonDeserializer.class );
    type( LinkedHashMultiset.class ).serializer( CollectionJsonSerializer.class )
            .deserializer( LinkedHashMultisetJsonDeserializer.class );
    type( SortedMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( SortedMultisetJsonDeserializer.class );
    type( TreeMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( TreeMultisetJsonDeserializer.class );
    type( ImmutableMultiset.class ).serializer( CollectionJsonSerializer.class )
            .deserializer( ImmutableMultisetJsonDeserializer.class );
    type( EnumMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( EnumMultisetJsonDeserializer.class );
    // Multimap
    type( Multimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( MultimapJsonDeserializer.class );
    type( ImmutableMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( ImmutableMultimapJsonDeserializer.class );
    type( ImmutableSetMultimap.class ).serializer( MultimapJsonSerializer.class )
            .deserializer( ImmutableSetMultimapJsonDeserializer.class );
    type( ImmutableListMultimap.class ).serializer( MultimapJsonSerializer.class )
            .deserializer( ImmutableListMultimapJsonDeserializer.class );
    type( SetMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( SetMultimapJsonDeserializer.class );
    type( HashMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( HashMultimapJsonDeserializer.class );
    type( LinkedHashMultimap.class ).serializer( MultimapJsonSerializer.class )
            .deserializer( LinkedHashMultimapJsonDeserializer.class );
    type( SortedSetMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( SortedSetMultimapJsonDeserializer.class );
    type( TreeMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( TreeMultimapJsonDeserializer.class );
    type( ListMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( ListMultimapJsonDeserializer.class );
    type( ArrayListMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( ArrayListMultimapJsonDeserializer.class );
    type( LinkedListMultimap.class ).serializer( MultimapJsonSerializer.class )
            .deserializer( LinkedListMultimapJsonDeserializer.class );
}
项目:bacter
文件:ACGLikelihood.java
/**
 * Ensure pattern counts are up to date.
 *
 * Garbled identifier fixed: "isAmbiguousstate" -> "isAmbiguousState" (BEAST2
 * DataType API).
 *
 * NOTE(review): Multiset&lt;int[]&gt; compares arrays by identity, so distinct array
 * instances with equal contents count as different patterns -- this presumably
 * relies on alignment.getPattern returning the same instance per pattern index;
 * confirm against the Alignment implementation.
 */
private void updatePatterns() {
    List<Region> regionList = acg.getRegions(locus);
    // Remove stale pattern sets
    patterns.keySet().retainAll(regionList);
    patternLogLikelihoods.keySet().retainAll(regionList);
    rootPartials.keySet().retainAll(regionList);
    constantPatterns.keySet().retainAll(regionList);
    for (Region region : regionList) {
        if (patterns.containsKey(region))
            continue;
        // Add new pattern set, counting each site's pattern in the region.
        Multiset<int[]> patSet = LinkedHashMultiset.create();
        for (int j = region.leftBoundary; j < region.rightBoundary; j++) {
            int[] pat = alignment.getPattern(alignment.getPatternIndex(j));
            patSet.add(pat);
        }
        patterns.put(region, patSet);
        // Allocate memory for corresponding log likelihoods and root partials
        patternLogLikelihoods.put(region, new double[patSet.elementSet().size()]);
        rootPartials.put(region, new double[patSet.elementSet().size() * nStates]);
        // Compute corresponding constant pattern list
        List<Integer> constantPatternList = new ArrayList<>();
        int patternIdx = 0;
        for (int[] pattern : patSet.elementSet()) {
            boolean isConstant = true;
            for (int i = 1; i < pattern.length; i++)
                if (pattern[i] != pattern[0]) {
                    isConstant = false;
                    break;
                }
            if (isConstant) {
                if (alignment.getDataType().isAmbiguousState(pattern[0])) {
                    // Only expand ambiguous constant sites when ambiguities are enabled.
                    if (useAmbiguitiesInput.get()) {
                        for (int state : alignment.getDataType().getStatesForCode(pattern[0]))
                            constantPatternList.add(patternIdx * nStates + state);
                    }
                } else {
                    constantPatternList.add(patternIdx * nStates + pattern[0]);
                }
            }
            patternIdx += 1;
        }
        constantPatterns.put(region, constantPatternList);
    }
}
项目:bacter
文件:ACGLikelihoodBeagle.java
/**
 * Ensure pattern counts are up to date.
 *
 * Garbled identifier fixed: "isAmbiguousstate" -> "isAmbiguousState" (BEAST2
 * DataType API).
 *
 * NOTE(review): Multiset&lt;int[]&gt; compares arrays by identity; this presumably
 * relies on alignment.getPattern returning the same instance per pattern index --
 * confirm against the Alignment implementation.
 */
private void updatePatterns() {
    List<Region> regionList = acg.getRegions(locus);
    // Remove stale pattern sets
    patterns.keySet().retainAll(regionList);
    constantPatterns.keySet().retainAll(regionList);
    for (Region region : regionList) {
        if (patterns.containsKey(region))
            continue;
        // Add new pattern set, counting each site's pattern in the region.
        Multiset<int[]> patSet = LinkedHashMultiset.create();
        for (int j = region.leftBoundary; j < region.rightBoundary; j++) {
            int[] pat = alignment.getPattern(alignment.getPatternIndex(j));
            patSet.add(pat);
        }
        patterns.put(region, patSet);
        // Compute corresponding constant pattern list
        List<Integer> constantPatternList = new ArrayList<>();
        int patternIdx = 0;
        for (int[] pattern : patSet.elementSet()) {
            boolean isConstant = true;
            for (int i = 1; i < pattern.length; i++)
                if (pattern[i] != pattern[0]) {
                    isConstant = false;
                    break;
                }
            if (isConstant) {
                if (alignment.getDataType().isAmbiguousState(pattern[0])) {
                    // Only expand ambiguous constant sites when ambiguities are enabled.
                    if (useAmbiguitiesInput.get()) {
                        for (int state : alignment.getDataType().getStatesForCode(pattern[0]))
                            constantPatternList.add(patternIdx * nStates + state);
                    }
                } else {
                    constantPatternList.add(patternIdx * nStates + pattern[0]);
                }
            }
            patternIdx += 1;
        }
        constantPatterns.put(region, constantPatternList);
    }
}
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 [email protected] 举报,一经查实,本站将立刻删除。