repo
stringlengths
1
191
file
stringlengths
23
351
code
stringlengths
0
5.32M
file_length
int64
0
5.32M
avg_line_length
float64
0
2.9k
max_line_length
int64
0
288k
extension_type
stringclasses
1 value
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/analyzers/Analyzer.java
package org.grobid.core.analyzers;

import org.grobid.core.layout.LayoutToken;
import org.grobid.core.lang.Language;

import java.util.List;
import java.util.ArrayList;
import java.util.StringTokenizer;

/**
 * Abstract analyzer for tokenizing/filtering text.
 *
 */
public interface Analyzer {

    // Split raw text into string tokens, without any language information.
    List<String> tokenize(String text);

    // Split raw text into string tokens, possibly using a language-specific tokenizer.
    List<String> tokenize(String text, Language lang);

    // Re-apply tokenization to chunks of text that were already tokenized
    // (e.g. tokens coming from the PDF representation).
    List<String> retokenize(List<String> chunks);

    // Tokenize raw text into LayoutToken objects (tokens carrying layout information).
    List<LayoutToken> tokenizeWithLayoutToken(String text);

    // Further split tokens mixing alphabetical and numerical characters into
    // letter/digit subsequences (e.g. "1m74" -> "1", "m", "74").
    List<String> retokenizeSubdigits(List<String> chunks);

    // Same letter/digit splitting as retokenizeSubdigits, producing LayoutToken objects.
    List<LayoutToken> retokenizeSubdigitsWithLayoutToken(List<String> chunks);

    // Same letter/digit splitting applied to an existing list of LayoutToken.
    List<LayoutToken> retokenizeSubdigitsFromLayoutToken(List<LayoutToken> tokens);

    // Human-readable name of the analyzer implementation.
    String getName();
}
739
22.870968
83
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/analyzers/GrobidAnalyzer.java
package org.grobid.core.analyzers;

import org.grobid.core.lang.Language;
import org.grobid.core.layout.LayoutToken;
import org.grobid.core.utilities.UnicodeUtil;
import org.grobid.core.utilities.LayoutTokensUtil;

import org.wipo.nlp.textboundaries.ReTokenizer;
import org.wipo.nlp.textboundaries.ReTokenizerFactory;

import java.util.List;
import java.util.ArrayList;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An Analyzer able to dispatch text to be tokenized to the adequate analyzer given a specified language.
 *
 * The language might be preliminary set by the language recognizer or manually if it is already
 * known by the context of usage of the text.
 *
 */
public class GrobidAnalyzer implements Analyzer {
    private static final Logger LOGGER = LoggerFactory.getLogger(GrobidAnalyzer.class);

    private static volatile GrobidAnalyzer instance;

    // Language-specific tokenizers: ja/zh are created lazily on first use
    // (NOTE(review): that lazy init is not synchronized — confirm single-threaded
    // first use or accept the benign double creation); ko is created eagerly
    // in the constructor.
    private ReTokenizer jaAnalyzer = null;
    private ReTokenizer krAnalyzer = null;
    private ReTokenizer zhAnalyzer = null;

    /**
     * Return the shared singleton instance, created on first call
     * (double-checked locking on the volatile {@code instance} field).
     */
    public static GrobidAnalyzer getInstance() {
        if (instance == null) {
            // double check idiom
            synchronized (GrobidAnalyzer.class) {
                if (instance == null) {
                    getNewInstance();
                }
            }
        }
        return instance;
    }

    /**
     * Creates a new instance.
     */
    private static synchronized void getNewInstance() {
        LOGGER.debug("Get new instance of GrobidAnalyzer");
        instance = new GrobidAnalyzer();
    }

    /**
     * Hidden constructor: eagerly creates the Korean tokenizer; a failure is
     * logged but does not prevent construction (Korean tokenization would then
     * fail later and be caught by the tokenize/retokenize try blocks).
     */
    private GrobidAnalyzer() {
        try {
            krAnalyzer = ReTokenizerFactory.create("ko_g");
        } catch (Exception e) {
            LOGGER.error("Invalid kr tokenizer", e);
        }
    }

    public String getName() {
        return "GrobidAnalyzer";
    }

    /**
     * Tokenizer entry point, with no language information.
     */
    public List<String> tokenize(String text) {
        return tokenize(text, null);
    }

    /**
     * Tokenize {@code text}, dispatching to the analyzer matching {@code lang}.
     * Unknown/unspecified languages fall back to the default Indo-European analyzer.
     * Never returns null; returns an empty list for null/empty input or on error.
     */
    public List<String> tokenize(String text, Language lang) {
        List<String> result = new ArrayList<>();
        if ((text == null) || (text.length() == 0)) {
            return result;
        }
        try {
            if ((lang == null) || (lang.getLang() == null)) {
                // default Indo-European languages
                result = GrobidDefaultAnalyzer.getInstance().tokenize(text);
            } else if (lang.isJapaneses()) {
                // Japanese analyser
                if (jaAnalyzer == null)
                    jaAnalyzer = ReTokenizerFactory.create("ja_g");
                result = jaAnalyzer.tokensAsList(text);
            } else if (lang.isChinese()) {
                // Chinese analyser
                if (zhAnalyzer == null)
                    zhAnalyzer = ReTokenizerFactory.create("zh_g");
                result = zhAnalyzer.tokensAsList(text);
            } else if (lang.isKorean()) {
                // Korean analyser (created in the constructor)
                result = krAnalyzer.tokensAsList(text);
            } else if (lang.isArabic()) {
                // Arabic analyser: default tokenization, then normalize each
                // token character-by-character (String being immutable, each
                // token is rebuilt with a StringBuilder)
                result = GrobidDefaultAnalyzer.getInstance().tokenize(text);
                for (int p = 0; p < result.size(); p++) {
                    String token = result.get(p);
                    StringBuilder newToken = new StringBuilder(token.length());
                    for (int i = 0; i < token.length(); i++) {
                        newToken.append(ArabicChars.arabicCharacters(token.charAt(i)));
                    }
                    result.set(p, newToken.toString());
                }
            } else {
                // default Indo-European languages
                result = GrobidDefaultAnalyzer.getInstance().tokenize(text);
            }
        } catch (Exception e) {
            LOGGER.error("Invalid tokenizer", e);
        }
        return result;
    }

    /**
     * Re-tokenizer entry point to be applied to text already tokenized in the PDF representation.
     */
    public List<String> retokenize(List<String> textTokenized) {
        return retokenize(textTokenized, null);
    }

    /**
     * Re-tokenize already-tokenized chunks with the analyzer matching {@code lang}.
     * Never returns null; returns an empty list for null/empty input or on error.
     */
    public List<String> retokenize(List<String> textTokenized, Language lang) {
        // BUG FIX: result was previously initialized to null, and the
        // Japanese/Chinese/Korean/Arabic branches then called
        // result.addAll()/result.add() on it, throwing a NullPointerException.
        // Initializing to an empty list fixes that and also guarantees a
        // non-null return value on the exception path.
        List<String> result = new ArrayList<>();
        if ((textTokenized == null) || (textTokenized.size() == 0)) {
            return result;
        }
        try {
            if ((lang == null) || (lang.getLang() == null)) {
                // default Indo-European languages
                result = GrobidDefaultAnalyzer.getInstance().retokenize(textTokenized);
            } else if (lang.isJapaneses()) {
                // Japanese analyser
                if (jaAnalyzer == null)
                    jaAnalyzer = ReTokenizerFactory.create("ja_g");
                for (String chunk : textTokenized) {
                    result.addAll(jaAnalyzer.tokensAsList(chunk));
                }
            } else if (lang.isChinese()) {
                // Chinese analyser
                if (zhAnalyzer == null)
                    zhAnalyzer = ReTokenizerFactory.create("zh_g");
                for (String chunk : textTokenized) {
                    result.addAll(zhAnalyzer.tokensAsList(chunk));
                }
            } else if (lang.isKorean()) {
                // Korean analyser (created in the constructor)
                for (String chunk : textTokenized) {
                    result.addAll(krAnalyzer.tokensAsList(chunk));
                }
            } else if (lang.isArabic()) {
                // Arabic analyser: normalize each token character-by-character
                for (String token : textTokenized) {
                    StringBuilder newToken = new StringBuilder(token.length());
                    for (int i = 0; i < token.length(); i++) {
                        newToken.append(ArabicChars.arabicCharacters(token.charAt(i)));
                    }
                    result.add(newToken.toString());
                }
            } else {
                // default Indo-European languages
                result = GrobidDefaultAnalyzer.getInstance().retokenize(textTokenized);
            }
        } catch (Exception e) {
            LOGGER.error("Invalid tokenizer", e);
        }
        return result;
    }

    public List<LayoutToken> tokenizeWithLayoutToken(String text) {
        return tokenizeWithLayoutToken(text, null);
    }

    /**
     * Tokenize text into LayoutToken objects: the text is Unicode-normalized,
     * tokenized for the given language, then wrapped into layout tokens.
     */
    public List<LayoutToken> tokenizeWithLayoutToken(String text, Language lang) {
        text = UnicodeUtil.normaliseText(text);
        List<String> tokens = tokenize(text, lang);
        return LayoutTokensUtil.getLayoutTokensForTokenizedText(tokens);
    }

    // The sub-digit re-tokenizations are language independent and delegated to
    // the default analyzer.
    public List<String> retokenizeSubdigits(List<String> chunks) {
        return GrobidDefaultAnalyzer.getInstance().retokenizeSubdigits(chunks);
    }

    public List<LayoutToken> retokenizeSubdigitsWithLayoutToken(List<String> chunks) {
        return GrobidDefaultAnalyzer.getInstance().retokenizeSubdigitsWithLayoutToken(chunks);
    }

    public List<LayoutToken> retokenizeSubdigitsFromLayoutToken(List<LayoutToken> tokens) {
        return GrobidDefaultAnalyzer.getInstance().retokenizeSubdigitsFromLayoutToken(tokens);
    }
}
6,574
29.581395
105
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/analyzers/GrobidFilterDeleteSpaceBetweenSameAlphabet.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @author Bruno Pouliquen @ WIPO
 */
package org.grobid.core.analyzers;

import java.io.IOException;

import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;

/**
 * Token filter that concatenates consecutive tokens written in the same
 * "alphabet" (Latin letters / digits, including their fullwidth forms),
 * deleting the space between them, e.g. "a b" -> "ab", "1 2" -> "12",
 * "0." "1" -> "0.1".
 */
public final class GrobidFilterDeleteSpaceBetweenSameAlphabet extends TokenFilter {

    private CharTermAttribute termAttr;
    private TypeAttribute typeAttr;
    private PositionIncrementAttribute posAttr;
    private OffsetAttribute offsetAttr;

    // One token of look-ahead: when the merging loop reads one token too many,
    // it is stashed here and emitted by the next call to incrementToken().
    private String previousBuffer;
    private int previousBufferLength = 0;
    private String previousType = null;
    private int previousStartOffset = 0;
    private int previousEndOffset = 0;
    private int previousPosIncr = 0;

    public GrobidFilterDeleteSpaceBetweenSameAlphabet(TokenStream input) {
        super(input);
        termAttr = (CharTermAttribute) addAttribute(CharTermAttribute.class);
        typeAttr = (TypeAttribute) addAttribute(TypeAttribute.class);
        // fixed: stray empty statement (";;") removed after this call
        offsetAttr = (OffsetAttribute) addAttribute(OffsetAttribute.class);
        posAttr = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
        previousBuffer = null;
    }

    public boolean incrementToken() throws IOException {
        // Emit the token buffered by the previous call, if any.
        if (previousBuffer != null) {
            termAttr.setEmpty().append(previousBuffer);
            typeAttr.setType(previousType);
            offsetAttr.setOffset(previousStartOffset, previousEndOffset);
            posAttr.setPositionIncrement(previousPosIncr);
            previousBuffer = null;
            return true;
        }
        if (!input.incrementToken()) { //#B
            return false; //#C
        }
        // NOTE(review): termAttr.buffer() is the attribute's internal char
        // array, which is reused across incrementToken() calls, so buffer[0]
        // below reflects the *current* token's first char inside the loop.
        char[] buffer = termAttr.buffer();
        // Tokens not starting with a Latin/digit char pass through unchanged.
        if (!isLatinChar(buffer[0]))
            return true;
        if (isDigit(buffer[0])) {
            // if (!isNumeral(previousBuffer))
            return true;
        }
        // Seed the merge with the current token.
        previousBuffer = termAttr.toString();
        previousBufferLength = termAttr.length();
        previousType = typeAttr.type();
        previousStartOffset = offsetAttr.startOffset();
        previousEndOffset = offsetAttr.endOffset();
        previousPosIncr = posAttr.getPositionIncrement();

        boolean cont = true;
        String currentBuffer = null;
        int currentBufferLength = 0;
        String currentType = null;
        int currentStartOffset = -1;
        int currentEndOffset = -1;
        int currentPosIncr = 0;
        while (cont && input.incrementToken()) {
            currentBuffer = termAttr.toString();
            currentBufferLength = termAttr.length();
            currentType = typeAttr.type();
            currentStartOffset = offsetAttr.startOffset();
            currentEndOffset = offsetAttr.endOffset();
            currentPosIncr = posAttr.getPositionIncrement();
            // Series of conditions to concatenate tokens:
            if ((buffer[0] == '.' && isNumeral(previousBuffer) // 0 . => 0.
                ) || (isNumeral(currentBuffer) && isNumeral(previousBuffer) // 1 2 => 12
                ) || (previousBuffer.endsWith(".") && isNumeral(previousBuffer)
                      && isNumeral(currentBuffer) // 0. 1 => 0.1
                ) || (currentStartOffset >= previousEndOffset
                      && isLatinChar(buffer[0])
                      && currentType.equals(previousType)
                      && (!(isNumeral(previousBuffer) && !isNumeral(currentBuffer)))
                      && (!(isNumeral(currentBuffer) && !isNumeral(previousBuffer))) // a b => ab
                )) {
                // current token has the same alphabet, we concatenate them
                String n = previousBuffer + currentBuffer;
                previousBuffer = n;
                currentBuffer = null;
                previousEndOffset = currentEndOffset;
            } else {
                cont = false;
                break;
            }
        }
        // Emit the (possibly concatenated) token.
        termAttr.setEmpty().append(previousBuffer);
        typeAttr.setType(previousType);
        offsetAttr.setOffset(previousStartOffset, previousEndOffset);
        posAttr.setPositionIncrement(previousPosIncr);
        previousBuffer = null;
        // If the loop read one token beyond the merge, buffer it for the next call.
        if (currentBuffer != null) {
            previousBuffer = currentBuffer;
            previousBufferLength = currentBufferLength;
            previousType = currentType;
            previousStartOffset = currentStartOffset;
            previousEndOffset = currentEndOffset;
            previousPosIncr = currentPosIncr;
        }
        return true;
    }

    // ASCII digits and fullwidth digits (U+FF10..U+FF19).
    private boolean isDigit(char c) {
        return ((c >= '0' && c <= '9') || (c >= 0xFF10 && c <= 0xFF19));
    }

    // ASCII letters/digits plus the fullwidth ASCII block (U+FF01..U+FF5E).
    private boolean isLatinChar(char c) {
        return ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
                || (c >= '0' && c <= '9')
                || (c >= 0xFF10 && c <= 0xFF19)
                || (c >= 0xFF01 && c <= 0xFF5E));
    }

    // A token is "numeral" when non-empty and starting with a digit.
    private boolean isNumeral(String s) {
        return (s != null && !s.isEmpty() && isDigit(s.charAt(0)));
    }

    @Override
    public void reset() throws IOException {
        super.reset();
        previousBuffer = null;
        previousBufferLength = 0;
        previousType = null;
        previousStartOffset = 0;
        previousEndOffset = 0;
        previousPosIncr = 0;
    }
}
5,652
31.866279
91
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/analyzers/GrobidDefaultAnalyzer.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.grobid.core.analyzers;

import org.grobid.core.layout.LayoutToken;
import org.grobid.core.utilities.TextUtilities;
import org.grobid.core.utilities.UnicodeUtil;
import org.grobid.core.lang.Language;

import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

/**
 * Default tokenizer adequate for all Indo-European languages.
 *
 */
public class GrobidDefaultAnalyzer implements Analyzer {

    private static volatile GrobidDefaultAnalyzer instance;

    /**
     * Return the shared singleton instance.
     * BUG FIX: the synchronized block of the double-checked locking idiom was
     * commented out, so concurrent first calls could each create an instance;
     * the idiom is restored here, consistent with GrobidAnalyzer.getInstance().
     */
    public static GrobidDefaultAnalyzer getInstance() {
        if (instance == null) {
            // double check idiom
            synchronized (GrobidDefaultAnalyzer.class) {
                if (instance == null) {
                    getNewInstance();
                }
            }
        }
        return instance;
    }

    /**
     * Creates a new instance.
     */
    private static synchronized void getNewInstance() {
        instance = new GrobidDefaultAnalyzer();
    }

    /**
     * Hidden constructor
     */
    private GrobidDefaultAnalyzer() {
    }

    public static final String delimiters = TextUtilities.delimiters;

    // the following regex is used to separate alphabetical and numerical character subsequences
    // note: see about using \p{N} for unicode digits
    private static final String REGEX = "(?<=[\\p{L}])(?=\\d)|(?<=\\d)(?=\\D)";

    public String getName() {
        return "DefaultGrobidAnalyzer";
    }

    public List<String> tokenize(String text) {
        // as a default analyzer, language is not considered
        return tokenize(text, null);
    }

    /**
     * Tokenize text by splitting on the project-wide delimiter set, keeping the
     * delimiters themselves as tokens. The language parameter is ignored by
     * this default analyzer. Text is Unicode-normalized first.
     */
    public List<String> tokenize(String text, Language lang) {
        List<String> result = new ArrayList<>();
        text = UnicodeUtil.normaliseText(text);
        StringTokenizer st = new StringTokenizer(text, delimiters, true);
        while (st.hasMoreTokens()) {
            result.add(st.nextToken());
        }
        return result;
    }

    /**
     * Re-apply the default tokenization to chunks of already-tokenized text,
     * normalizing each chunk before splitting.
     */
    public List<String> retokenize(List<String> chunks) {
        List<String> result = new ArrayList<>();
        for (String chunk : chunks) {
            chunk = UnicodeUtil.normaliseText(chunk);
            StringTokenizer st = new StringTokenizer(chunk, delimiters, true);
            while (st.hasMoreTokens()) {
                result.add(st.nextToken());
            }
        }
        return result;
    }

    public List<LayoutToken> tokenizeWithLayoutToken(String text) {
        return tokenizeWithLayoutToken(text, null);
    }

    /**
     * Tokenize text returning list of LayoutTokens, with running character
     * offsets; a token immediately followed by "\n" is flagged with
     * setNewLineAfter(true).
     */
    public List<LayoutToken> tokenizeWithLayoutToken(String text, Language language) {
        List<LayoutToken> result = new ArrayList<>();
        text = UnicodeUtil.normaliseText(text);
        List<String> tokens = tokenize(text, language);
        int pos = 0;
        for (int i = 0; i < tokens.size(); i++) {
            String tok = tokens.get(i);
            LayoutToken layoutToken = new LayoutToken();
            layoutToken.setText(tok);
            layoutToken.setOffset(pos);
            result.add(layoutToken);
            pos += tok.length();
            if (i < tokens.size() - 1 && tokens.get(i + 1).equals("\n")) {
                layoutToken.setNewLineAfter(true);
            }
        }
        return result;
    }

    /**
     * To tokenize mixture of alphabetical and numerical characters by separating
     * separate alphabetical and numerical character subsequences. To be used
     * when relevant.
     * <p>
     * 1m74 -> "1", "m", "74"
     *
     */
    public List<String> retokenizeSubdigits(List<String> chunks) {
        List<String> result = new ArrayList<>();
        for (String token : chunks) {
            // we split "letter" characters and digits
            String[] subtokens = token.split(REGEX);
            for (String subtoken : subtokens) {
                result.add(subtoken);
            }
        }
        return result;
    }

    /**
     * To tokenize mixture of alphabetical and numerical characters by separating
     * separate alphabetical and numerical character subsequences. To be used
     * when relevant.
     * <p>
     * 1m74 -> tokens.add(new LayoutToken("1"));
     *         tokens.add(new LayoutToken("m"));
     *         tokens.add(new LayoutToken("74"));
     *
     */
    public List<LayoutToken> retokenizeSubdigitsWithLayoutToken(List<String> chunks) {
        List<LayoutToken> result = new ArrayList<>();
        int offset = 0;
        for (String token : chunks) {
            // we split "letter" characters and digits
            String[] subtokens = token.split(REGEX);
            for (String subtoken : subtokens) {
                LayoutToken layoutToken = new LayoutToken();
                layoutToken.setText(subtoken);
                layoutToken.setOffset(offset);
                offset += subtoken.length();
                result.add(layoutToken);
            }
        }
        return result;
    }

    /**
     * To tokenize mixture of alphabetical and numerical characters by separating
     * separate alphabetical and numerical character subsequences. To be used
     * when relevant.
     * Input is a list of LayoutToken, but the coordinates of the subtokens are however
     * not recomputed here (at least for the moment).
     * <p>
     * 1m74 -> tokens.add(new LayoutToken("1"));
     *         tokens.add(new LayoutToken("m"));
     *         tokens.add(new LayoutToken("74"));
     *
     */
    public List<LayoutToken> retokenizeSubdigitsFromLayoutToken(List<LayoutToken> tokens) {
        List<LayoutToken> result = new ArrayList<>();
        for (LayoutToken token : tokens) {
            // whitespace-only or empty tokens are passed through unchanged
            if (token.getText() == null || token.getText().trim().length() == 0) {
                result.add(token);
            } else {
                String tokenText = token.getText();
                String[] subtokens = tokenText.split(REGEX);
                // offsets restart from the original token's offset
                int offset = token.getOffset();
                for (String subtoken : subtokens) {
                    LayoutToken layoutToken = new LayoutToken();
                    layoutToken.setText(subtoken);
                    layoutToken.setOffset(offset);
                    offset += subtoken.length();
                    result.add(layoutToken);
                }
            }
        }
        return result;
    }
}
7,225
34.07767
97
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/Note.java
package org.grobid.core.data;

import org.grobid.core.layout.LayoutToken;
import org.grobid.core.layout.Page;
import org.grobid.core.utilities.*;

import java.util.List;

import static com.google.common.collect.Iterables.getLast;

/**
 * A note (footnote or margin note) attached to a page: a generated identifier,
 * an optional label, its layout tokens, its text, and its start offset in the page.
 */
public class Note {

    public enum NoteType {
        FOOT, MARGIN
    }

    private String identifier;
    private String label;
    private List<LayoutToken> tokens;
    private String text;
    private int offsetStartInPage;
    // whether this note should be skipped by downstream processing
    private boolean ignored = false;
    private NoteType noteType;

    public Note() {
        // random 7-character identifier, independent from the note label/marker
        this.identifier = KeyGen.getKey().substring(0, 7);
    }

    public Note(String label, List<LayoutToken> tokens, NoteType noteType) {
        this();
        this.label = label;
        this.tokens = tokens;
        this.noteType = noteType;
    }

    public Note(String label, List<LayoutToken> tokens, String text, NoteType noteType) {
        this(label, tokens, noteType);
        this.text = text;
    }

    public Note(String label, List<LayoutToken> tokens, String text, int offsetStartInPage, NoteType noteType) {
        this(label, tokens, text, noteType);
        this.offsetStartInPage = offsetStartInPage;
    }

    public String getIdentifier() {
        return identifier;
    }

    public void setIdentifier(String identifier) {
        this.identifier = identifier;
    }

    public int getOffsetStartInPage() {
        return offsetStartInPage;
    }

    public void setOffsetStartInPage(int offsetStartInPage) {
        this.offsetStartInPage = offsetStartInPage;
    }

    // page number of the first layout token of the note
    public int getPageNumber() {
        return tokens.get(0).getPage();
    }

    public String getText() {
        return text;
    }

    public void setText(String text) {
        this.text = text;
    }

    public List<LayoutToken> getTokens() {
        return tokens;
    }

    public void setTokens(List<LayoutToken> tokens) {
        this.tokens = tokens;
    }

    public String getLabel() {
        return this.label;
    }

    public void setLabel(String label) {
        this.label = label;
    }

    // offset of the last layout token of the note
    public int getOffsetEndInPage() {
        return getLast(tokens).getOffset();
    }

    public boolean isIgnored() {
        return ignored;
    }

    public void setIgnored(boolean ignored) {
        this.ignored = ignored;
    }

    public NoteType getNoteType() {
        return this.noteType;
    }

    public void setNoteType(NoteType noteType) {
        this.noteType = noteType;
    }

    // lowercase name used in serialization: "foot" or "margin"
    public String getNoteTypeName() {
        return this.noteType == NoteType.FOOT ? "foot" : "margin";
    }
}
2,949
21.348485
112
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/Equation.java
package org.grobid.core.data;

import nu.xom.Attribute;
import nu.xom.Element;
import org.apache.commons.lang3.StringUtils;
import org.grobid.core.document.xml.XmlBuilderUtils;
import org.grobid.core.engines.Engine;
import org.grobid.core.engines.config.GrobidAnalysisConfig;
import org.grobid.core.layout.BoundingBox;
import org.grobid.core.layout.LayoutToken;
import org.grobid.core.utilities.BoundingBoxCalculator;
import org.grobid.core.utilities.LayoutTokensUtil;
import org.grobid.core.utilities.counters.CntManager;
import org.grobid.core.utilities.TextUtilities;

import java.util.ArrayList;
import java.util.List;
import java.util.SortedSet;

/**
 * Class for representing an equation.
 *
 */
public class Equation {
    // textual content of the formula, accumulated via appendContent()
    protected StringBuilder content = null;
    // formula label (e.g. "(1)"), accumulated via appendLabel()
    protected StringBuilder label = null;
    protected String id = null;

    // NOTE(review): textArea is never read or written in this class — kept for
    // binary compatibility, candidate for removal
    private List<BoundingBox> textArea;
    private List<LayoutToken> layoutTokens;

    private List<LayoutToken> contentTokens = new ArrayList<>();
    private List<LayoutToken> labelTokens = new ArrayList<>();

    public Equation() {
        content = new StringBuilder();
        label = new StringBuilder();
    }

    /**
     * Build the TEI <formula> element for this equation, or null when the
     * content is empty. Coordinates are attached when requested by the config;
     * a non-empty label is appended as a nested <label> element.
     */
    public Element toTEIElement(GrobidAnalysisConfig config) {
        if (StringUtils.isEmpty(content)) {
            return null;
        }
        Element formulaElement = XmlBuilderUtils.teiElement("formula");
        if (id != null) {
            XmlBuilderUtils.addXmlId(formulaElement, this.getTeiId());
        }
        if ((config.getGenerateTeiCoordinates() != null)
                && (config.getGenerateTeiCoordinates().contains("formula"))) {
            XmlBuilderUtils.addCoords(formulaElement,
                LayoutTokensUtil.getCoordsStringForOneBox(getLayoutTokens()));
        }
        formulaElement.appendChild(LayoutTokensUtil.normalizeText(content.toString()).trim());
        if ((label != null) && (label.length() > 0)) {
            Element labelEl = XmlBuilderUtils.teiElement("label",
                LayoutTokensUtil.normalizeText(label.toString()));
            formulaElement.appendChild(labelEl);
        }
        return formulaElement;
    }

    /**
     * Serialize the equation as a TEI XML string, or null when empty.
     */
    public String toTEI(GrobidAnalysisConfig config) {
        Element formulaElement = toTEIElement(config);
        if (formulaElement != null)
            return formulaElement.toXML();
        else
            return null;
    }

    public List<LayoutToken> getContentTokens() {
        return contentTokens;
    }

    public List<LayoutToken> getLabelTokens() {
        return labelTokens;
    }

    public void appendLabel(String lab) {
        label.append(lab);
    }

    public String getLabel() {
        return label.toString();
    }

    public void appendContent(String trash) {
        content.append(trash);
    }

    public String getContent() {
        return content.toString();
    }

    // start position in the full text tokenization: offset of the first layout
    // token, or -1 when no tokens are attached
    public int getStart() {
        if ((layoutTokens != null) && (layoutTokens.size() > 0))
            return layoutTokens.get(0).getOffset();
        else
            return -1;
    }

    // end position in the full text tokenization: offset of the last layout
    // token, or -1 when no tokens are attached
    public int getEnd() {
        if ((layoutTokens != null) && (layoutTokens.size() > 0))
            return layoutTokens.get(layoutTokens.size() - 1).getOffset();
        else
            return -1;
    }

    // derive the id from the cleaned label text
    public void setId() {
        this.id = TextUtilities.cleanField(label.toString(), false);
    }

    public void setId(String theId) {
        this.id = theId;
    }

    public String getId() {
        return this.id;
    }

    public String getTeiId() {
        return "formula_" + this.id;
    }

    public List<LayoutToken> getLayoutTokens() {
        return layoutTokens;
    }

    public void setLayoutTokens(List<LayoutToken> layoutTokens) {
        this.layoutTokens = layoutTokens;
    }

    public void addLayoutToken(LayoutToken token) {
        if (token == null)
            return;
        if (layoutTokens == null)
            layoutTokens = new ArrayList<>();
        layoutTokens.add(token);
    }

    public void addLayoutTokens(List<LayoutToken> tokens) {
        if (tokens == null)
            return;
        if (layoutTokens == null)
            layoutTokens = new ArrayList<>();
        // idiom: bulk addAll instead of the previous element-by-element copy loop
        layoutTokens.addAll(tokens);
    }

    /**
     * Bounding box of the whole equation as a single-element list, or null
     * when no layout tokens are attached.
     */
    public List<BoundingBox> getCoordinates() {
        if (layoutTokens == null || layoutTokens.size() == 0)
            return null;
        else {
            BoundingBox oneBox = BoundingBoxCalculator.calculateOneBox(layoutTokens, true);
            List<BoundingBox> result = new ArrayList<>();
            result.add(oneBox);
            return result;
        }
    }
}
5,357
25.924623
113
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/Date.java
package org.grobid.core.data; import org.grobid.core.utilities.TextUtilities; /** * Class for representing a date. * We use our own representation of dates for having a comparable which prioritize the most fully specified * dates first, then the earliest date, i.e.: * 10.2010 < 2010 * 20.10.2010 < 10.2010 * 19.10.2010 < 20.10.2010 * 1999 < 10.2000 * 10.1999 < 2000 * which is not the same as a comparison based only on time flow. * For comparing dates by strict time flow, please use java.util.Date + java.util.Calendar */ public class Date implements Comparable<Date> { private int day = -1; private int month = -1; private int year = -1; private String rawDate = null; private String dayString = null; private String monthString = null; private String yearString = null; public Date() { } public Date(Date fromDate) { this.day = fromDate.day; this.month = fromDate.month; this.year = fromDate.year; this.rawDate = fromDate.rawDate; this.dayString = fromDate.dayString; this.monthString = fromDate.monthString; this.yearString = fromDate.yearString; } public int getDay() { return day; } public void setDay(int d) { day = d; } public int getMonth() { return month; } public void setMonth(int d) { month = d; } public int getYear() { return year; } public void setYear(int d) { year = d; } public String getRawDate() { return rawDate; } public void setRawDate(String s) { rawDate = s; } public String getDayString() { return dayString; } public void setDayString(String d) { dayString = d; } public String getMonthString() { return monthString; } public void setMonthString(String d) { monthString = d; } public String getYearString() { return yearString; } public void setYearString(String d) { yearString = d; } /*public java.util.Date getJavaDate() { java.util.Calendar cal = new java.util.Calendar(); cal.set(year, month, day); }*/ /** * The lowest date always win. 
*/ public int compareTo(Date another) { final int BEFORE = -1; final int EQUAL = 0; final int AFTER = 1; if (another.getYear() == -1) { return BEFORE; } else if (year == -1) { return AFTER; } else if (year < another.getYear()) { return BEFORE; } else if (year > another.getYear()) { return AFTER; } else { // years are identical if (another.getMonth() == -1) { return BEFORE; } else if (month == -1) { return AFTER; } else if (month < another.getMonth()) { return BEFORE; } else if (month > another.getMonth()) { return AFTER; } else { // months are identical if (another.getDay() == -1) { return BEFORE; } else if (day == -1) { return AFTER; } else if (day < another.getDay()) { return BEFORE; } else if (day > another.getDay()) { return AFTER; } } } return EQUAL; } public boolean isNotNull() { return (rawDate != null) || (dayString != null) || (monthString != null) || (yearString != null) || (day != -1) || (month != -1) || (year != -1); } public boolean isAmbiguous() { return false; } public static String toISOString(Date date) { int year = date.getYear(); int month = date.getMonth(); int day = date.getDay(); String when = ""; if (year != -1) { if (year <= 9) when += "000" + year; else if (year <= 99) when += "00" + year; else if (year <= 999) when += "0" + year; else when += year; if (month != -1) { if (month <= 9) when += "-0" + month; else when += "-" + month; if (day != -1) { if (day <= 9) when += "-0" + day; else when += "-" + day; } } } return when; } /** * Return a new date instance by merging the date information from a first date with * the date information from a second date. * The merging follows the year, month, day sequence. If the years * for instance clash, the merging is stopped. 
* * Examples of merging: * "2010" "2010-10" -> "2010-10" * "2010" "2010-10-27" -> "2010-10-27" * "2010-10" "2010-10-27" -> "2010-10-27" * "2010-10-27" "2010-10" -> "2010-10-27" * "2011-10" "2010-10-27" -> "2011-10" * "2010" "2016-10-27" -> "2010" * "2011" "2010" -> 2011 */ public static Date merge(Date date1, Date date2) { if (date1.getYear() == -1) { return new Date(date2); } if (date1.getYear() == date2.getYear()) { if (date1.getMonth() == -1 && date2.getMonth() != -1) { return new Date(date2); } if (date1.getMonth() == date2.getMonth()) { if (date1.getDay() == -1 && date2.getDay() != -1) { return new Date(date2); } } } return new Date(date1); } public String toString() { String theDate = ""; if (day != -1) { theDate += day + "-"; } if (month != -1) { theDate += month + "-"; } if (year != -1) { theDate += year; } theDate += " / "; if (dayString != null) { theDate += dayString + "-"; } if (monthString != null) { theDate += monthString + "-"; } if (yearString != null) { theDate += yearString; } return theDate; } public String toTEI() { // TEI uses ISO 8601 for date encoding String theDate = "<date when=\""; if (year != -1) { theDate += year; } if (month != -1) { theDate += "-" + month; } if (day != -1) { theDate += "-" + day; } if (rawDate != null) { theDate += "\">"+TextUtilities.HTMLEncode(rawDate)+"</date>"; } else { theDate += "\" />"; } return theDate; } public String toXML() { String theDate = "<date>"; if (day != -1) { theDate += "<day>" + day + "</day>"; } if (month != -1) { theDate += "<month>" + month + "</month>"; } if (year != -1) { theDate += "<year>" + year + "</year>"; } theDate += "</date>"; return theDate; } }
7,334
24.293103
107
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/Affiliation.java
package org.grobid.core.data; import org.grobid.core.utilities.TextUtilities; import org.grobid.core.lexicon.Lexicon; import java.util.ArrayList; import java.util.List; /** * Class for representing and exchanging affiliation information. * */ public class Affiliation { private String acronym = null; private String name = null; private String url = null; private List<String> institutions = null; // for additional institutions private List<String> departments = null; // for additional departments private List<String> laboratories = null; // for additional laboratories private String country = null; private String postCode = null; private String postBox = null; private String region = null; private String settlement = null; private String addrLine = null; private String marker = null; private String addressString = null; // unspecified address field private String affiliationString = null; // unspecified affiliation field private String rawAffiliationString = null; // raw affiliation text (excluding marker) private boolean failAffiliation = true; // tag for unresolved affiliation attachment // an identifier for the affiliation independent from the marker, present in the TEI result private String key = null; public Affiliation() { } public Affiliation(org.grobid.core.data.Affiliation aff) { acronym = aff.getAcronym(); name = aff.getName(); url = aff.getURL(); addressString = aff.getAddressString(); country = aff.getCountry(); marker = aff.getMarker(); departments = aff.getDepartments(); institutions = aff.getInstitutions(); laboratories = aff.getLaboratories(); postCode = aff.getPostCode(); postBox = aff.getPostBox(); region = aff.getRegion(); settlement = aff.getSettlement(); addrLine = aff.getAddrLine(); affiliationString = aff.getAffiliationString(); rawAffiliationString = aff.getRawAffiliationString(); } public String getAcronym() { return acronym; } public String getName() { return name; } public String getURL() { return url; } public String getAddressString() { 
return addressString; } public String getCountry() { return country; } public String getMarker() { return marker; } public String getPostCode() { return postCode; } public String getPostBox() { return postBox; } public String getRegion() { return region; } public String getSettlement() { return settlement; } public String getAddrLine() { return addrLine; } public String getAffiliationString() { return affiliationString; } public String getRawAffiliationString() { return rawAffiliationString; } public List<String> getInstitutions() { return institutions; } public List<String> getLaboratories() { return laboratories; } public List<String> getDepartments() { return departments; } public String getKey() { return key; } public void setAcronym(String s) { acronym = s; } public void setName(String s) { name = s; } public void setURL(String s) { url = s; } public void setAddressString(String s) { addressString = s; } public void setCountry(String s) { country = s; } public void setMarker(String s) { marker = s; } public void setPostCode(String s) { postCode = s; } public void setPostBox(String s) { postBox = s; } public void setRegion(String s) { region = s; } public void setSettlement(String s) { settlement = s; } public void setAddrLine(String s) { addrLine = s; } public void setAffiliationString(String s) { affiliationString = s; } public void setRawAffiliationString(String s) { rawAffiliationString = s; } public void setInstitutions(List<String> affs) { institutions = affs; } public void addInstitution(String aff) { if (institutions == null) institutions = new ArrayList<String>(); institutions.add(TextUtilities.cleanField(aff, true)); } public void setDepartments(List<String> affs) { departments = affs; } public void addDepartment(String aff) { if (departments == null) departments = new ArrayList<String>(); departments.add(TextUtilities.cleanField(aff, true)); } public void setLaboratories(List<String> affs) { laboratories = affs; } public void addLaboratory(String 
aff) { if (laboratories == null) laboratories = new ArrayList<String>(); laboratories.add(TextUtilities.cleanField(aff, true)); } public void extendFirstInstitution(String theExtend) { if (institutions == null) { institutions = new ArrayList<String>(); institutions.add(TextUtilities.cleanField(theExtend, true)); } else { String first = institutions.get(0); first = first + theExtend; institutions.set(0, first); } } public void extendLastInstitution(String theExtend) { if (institutions == null) { institutions = new ArrayList<String>(); institutions.add(TextUtilities.cleanField(theExtend, true)); } else { String first = institutions.get(institutions.size() - 1); first = first + theExtend; institutions.set(institutions.size() - 1, first); } } public void extendFirstDepartment(String theExtend) { if (departments == null) { departments = new ArrayList<String>(); departments.add(TextUtilities.cleanField(theExtend, true)); } else { String first = departments.get(0); first = first + theExtend; departments.set(0, first); } } public void extendLastDepartment(String theExtend) { if (departments == null) { departments = new ArrayList<String>(); departments.add(TextUtilities.cleanField(theExtend, true)); } else { String first = departments.get(departments.size() - 1); first = first + theExtend; departments.set(departments.size() - 1, first); } } public void extendFirstLaboratory(String theExtend) { if (laboratories == null) { laboratories = new ArrayList<String>(); laboratories.add(TextUtilities.cleanField(theExtend, true)); } else { String first = laboratories.get(0); first = first + theExtend; laboratories.set(0, first); } } public void extendLastLaboratory(String theExtend) { if (laboratories == null) { laboratories = new ArrayList<String>(); laboratories.add(TextUtilities.cleanField(theExtend, true)); } else { String first = laboratories.get(laboratories.size() - 1); first = first + theExtend; laboratories.set(laboratories.size() - 1, first); } } public boolean notNull() { 
return !((departments == null) & (institutions == null) & (laboratories == null) & (country == null) & (postCode == null) & (postBox == null) & (region == null) & (settlement == null) & (addrLine == null) & (affiliationString == null) & (addressString == null)); } public void setFailAffiliation(boolean b) { failAffiliation = b; } public boolean getFailAffiliation() { return failAffiliation; } public void setKey(String key) { this.key = key; } public void clean() { if (departments != null) { List<String> newDepartments = new ArrayList<String>(); for (String department : departments) { String dep = TextUtilities.cleanField(department, true); if (dep != null && dep.length() > 2) { newDepartments.add(dep); } } departments = newDepartments; } if (institutions != null) { List<String> newInstitutions = new ArrayList<String>(); for (String institution : institutions) { String inst = TextUtilities.cleanField(institution, true); if (inst != null && inst.length() > 1) { newInstitutions.add(inst); } } institutions = newInstitutions; } if (laboratories != null) { List<String> newLaboratories = new ArrayList<String>(); for (String laboratorie : laboratories) { String inst = TextUtilities.cleanField(laboratorie, true); if (inst != null && inst.length() > 2) { newLaboratories.add(inst); } } laboratories = newLaboratories; } if (country != null) { country = TextUtilities.cleanField(country, true); if (country != null && country.endsWith(")")) { // for some reason the ) at the end of this field is not removed country = country.substring(0,country.length()-1); } if (country != null && country.length() < 2) country = null; } if (postCode != null) { postCode = TextUtilities.cleanField(postCode, true); if (postCode != null && postCode.length() < 2) postCode = null; } if (postBox != null) { postBox = TextUtilities.cleanField(postBox, true); if (postBox != null && postBox.length() < 2) postBox = null; } if (region != null) { region = TextUtilities.cleanField(region, true); if (region != 
null && region.length() < 2) region = null; } if (settlement != null) { settlement = TextUtilities.cleanField(settlement, true); if (settlement != null && settlement.length() < 2) settlement = null; } if (addrLine != null) { addrLine = TextUtilities.cleanField(addrLine, true); if (addrLine != null && addrLine.length() < 2) addrLine = null; } if (addressString != null) { addressString = TextUtilities.cleanField(addressString, true); if (addressString != null && addressString.length() < 2) addressString = null; } if (affiliationString != null) { affiliationString = TextUtilities.cleanField(affiliationString, true); if (affiliationString != null && affiliationString.length() < 2) affiliationString = null; } if (marker != null) { marker = TextUtilities.cleanField(marker, true); if (marker != null) marker = marker.replace(" ", ""); } } /** * Return the number of overall structure members (address included) */ public int nbStructures() { int nbStruct = 0; if (departments != null) { nbStruct += departments.size(); } if (institutions != null) { nbStruct += institutions.size(); } if (laboratories != null) { nbStruct += laboratories.size(); } if (country != null) { nbStruct++; } if (postCode != null) { nbStruct++; } if (postBox != null) { nbStruct++; } if (region != null) { nbStruct++; } if (settlement != null) { nbStruct++; } if (addrLine != null) { nbStruct++; } if (marker != null) { nbStruct++; } return nbStruct; } @Deprecated public String toTEI() { StringBuilder tei = new StringBuilder(); if (!notNull()) { return null; } else { tei.append("<affiliation"); if (key != null) tei.append(" key=\"").append(key).append("\""); tei.append(">"); if (departments != null) { if (departments.size() == 1) { tei.append("<orgName type=\"department\">").append(TextUtilities.HTMLEncode(departments.get(0))).append("</orgName>"); } else { int q = 1; for (String depa : departments) { tei.append("<orgName type=\"department\" 
key=\"dep").append(q).append("\">").append(TextUtilities.HTMLEncode(depa)).append("</orgName>"); q++; } } } if (laboratories != null) { if (laboratories.size() == 1) { tei.append("<orgName type=\"laboratory\">").append(TextUtilities.HTMLEncode(laboratories.get(0))).append("</orgName>"); } else { int q = 1; for (String labo : laboratories) { tei.append("<orgName type=\"laboratory\" key=\"lab").append(q).append("\">").append(TextUtilities.HTMLEncode(labo)).append("</orgName>"); q++; } } } if (institutions != null) { if (institutions.size() == 1) { tei.append("<orgName type=\"institution\">").append(TextUtilities.HTMLEncode(institutions.get(0))).append("</orgName>"); } else { int q = 1; for (String inst : institutions) { tei.append("<orgName type=\"institution\" key=\"instit").append(q).append("\">").append(TextUtilities.HTMLEncode(inst)).append("</orgName>"); q++; } } } if ((getAddressString() != null) | (getAddrLine() != null) | (getPostBox() != null) | (getPostCode() != null) | (getSettlement() != null) | (getRegion() != null) | (getCountry() != null)) { tei.append("<address>"); if (getAddressString() != null) { tei.append("<addrLine>").append(TextUtilities.HTMLEncode(getAddressString())).append("</addrLine>"); } if (getAddrLine() != null) { tei.append("<addrLine>").append(TextUtilities.HTMLEncode(getAddrLine())).append("</addrLine>"); } if (getPostBox() != null) { tei.append("<postBox>").append(TextUtilities.HTMLEncode(getPostBox())).append("</postBox>"); } if (getPostCode() != null) { tei.append("<postCode>").append(TextUtilities.HTMLEncode(getPostCode())).append("</postCode>"); } if (getSettlement() != null) { tei.append("<settlement>").append(TextUtilities.HTMLEncode(getSettlement())).append("</settlement>"); } if (getRegion() != null) { tei.append("<region>").append(TextUtilities.HTMLEncode(getRegion())).append("</region>"); } if (getCountry() != null) { Lexicon lexicon = Lexicon.getInstance(); String code = lexicon.getCountryCode(getCountry()); 
tei.append("<country"); if (code != null) tei.append(" key=\"").append(code).append("\""); tei.append(">").append(TextUtilities.HTMLEncode(getCountry())).append("</country>"); } tei.append("</address>"); } tei.append("</affiliation>"); } return tei.toString(); } public static String toTEI(Affiliation aff, int nbTag) { StringBuffer tei = new StringBuffer(); TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("<affiliation"); if (aff.getKey() != null) tei.append(" key=\"").append(aff.getKey()).append("\""); tei.append(">\n"); if (aff.getDepartments() != null) { if (aff.getDepartments().size() == 1) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<orgName type=\"department\">" + TextUtilities.HTMLEncode(aff.getDepartments().get(0)) + "</orgName>\n"); } else { int q = 1; for (String depa : aff.getDepartments()) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<orgName type=\"department\" key=\"dep" + q + "\">" + TextUtilities.HTMLEncode(depa) + "</orgName>\n"); q++; } } } if (aff.getLaboratories() != null) { if (aff.getLaboratories().size() == 1) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<orgName type=\"laboratory\">" + TextUtilities.HTMLEncode(aff.getLaboratories().get(0)) + "</orgName>\n"); } else { int q = 1; for (String labo : aff.getLaboratories()) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<orgName type=\"laboratory\" key=\"lab" + q + "\">" + TextUtilities.HTMLEncode(labo) + "</orgName>\n"); q++; } } } if (aff.getInstitutions() != null) { if (aff.getInstitutions().size() == 1) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<orgName type=\"institution\">" + TextUtilities.HTMLEncode(aff.getInstitutions().get(0)) + "</orgName>\n"); } else { int q = 1; for (String inst : aff.getInstitutions()) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<orgName type=\"institution\" key=\"instit" + q + "\">" + TextUtilities.HTMLEncode(inst) + "</orgName>\n"); q++; } } } if ((aff.getAddressString() != 
null) || (aff.getAddrLine() != null) || (aff.getPostBox() != null) || (aff.getPostCode() != null) || (aff.getSettlement() != null) || (aff.getRegion() != null) || (aff.getCountry() != null)) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<address>\n"); if (aff.getAddressString() != null) { TextUtilities.appendN(tei, '\t', nbTag + 3); tei.append("<addrLine>" + TextUtilities.HTMLEncode(aff.getAddressString()) + "</addrLine>\n"); } if (aff.getAddrLine() != null) { TextUtilities.appendN(tei, '\t', nbTag + 3); tei.append("<addrLine>" + TextUtilities.HTMLEncode(aff.getAddrLine()) + "</addrLine>\n"); } if (aff.getPostBox() != null) { TextUtilities.appendN(tei, '\t', nbTag + 3); tei.append("<postBox>" + TextUtilities.HTMLEncode(aff.getPostBox()) + "</postBox>\n"); } if (aff.getPostCode() != null) { TextUtilities.appendN(tei, '\t', nbTag + 3); tei.append("<postCode>" + TextUtilities.HTMLEncode(aff.getPostCode()) + "</postCode>\n"); } if (aff.getSettlement() != null) { TextUtilities.appendN(tei, '\t', nbTag + 3); tei.append("<settlement>" + TextUtilities.HTMLEncode(aff.getSettlement()) + "</settlement>\n"); } if (aff.getRegion() != null) { TextUtilities.appendN(tei, '\t', nbTag + 3); tei.append("<region>" + TextUtilities.HTMLEncode(aff.getRegion()) + "</region>\n"); } if (aff.getCountry() != null) { String code = Lexicon.getInstance().getCountryCode(aff.getCountry()); TextUtilities.appendN(tei, '\t', nbTag + 3); tei.append("<country"); if (code != null) tei.append(" key=\"" + code + "\""); tei.append(">" + TextUtilities.HTMLEncode(aff.getCountry()) + "</country>\n"); } TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("</address>\n"); } TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("</affiliation>\n"); return tei.toString(); } @Override public String toString() { return "Affiliation{" + "name='" + name + '\'' + ", url='" + url + '\'' + ", key='" + key + '\'' + ", institutions=" + institutions + ", departments=" + departments + ", laboratories=" + 
laboratories + ", country='" + country + '\'' + ", postCode='" + postCode + '\'' + ", postBox='" + postBox + '\'' + ", region='" + region + '\'' + ", settlement='" + settlement + '\'' + ", addrLine='" + addrLine + '\'' + ", marker='" + marker + '\'' + ", addressString='" + addressString + '\'' + ", affiliationString='" + affiliationString + '\'' + ", rawAffiliationString='" + rawAffiliationString + '\'' + ", failAffiliation=" + failAffiliation + '}'; } }
22,596
33.499237
165
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/Table.java
package org.grobid.core.data; import org.grobid.core.GrobidModels; import org.apache.commons.lang3.StringUtils; import org.grobid.core.data.table.Cell; import org.grobid.core.data.table.Line; import org.grobid.core.data.table.LinePart; import org.grobid.core.data.table.Row; import org.grobid.core.document.xml.XmlBuilderUtils; import org.grobid.core.document.Document; import org.grobid.core.document.TEIFormatter; import org.grobid.core.engines.Engine; import org.grobid.core.engines.config.GrobidAnalysisConfig; import org.grobid.core.layout.BoundingBox; import org.grobid.core.layout.LayoutToken; import org.grobid.core.utilities.BoundingBoxCalculator; import org.grobid.core.utilities.LayoutTokensUtil; import org.grobid.core.utilities.counters.CntManager; import org.grobid.core.engines.counters.TableRejectionCounters; import org.grobid.core.tokenization.TaggingTokenCluster; import org.grobid.core.tokenization.TaggingTokenClusteror; import org.grobid.core.utilities.KeyGen; import org.grobid.core.engines.label.TaggingLabels; import org.grobid.core.engines.label.TaggingLabel; import org.grobid.core.engines.citations.CalloutAnalyzer.MarkerType; import java.util.ArrayList; import java.util.List; import nu.xom.Attribute; import nu.xom.Element; import nu.xom.Node; import nu.xom.Text; import static org.grobid.core.document.xml.XmlBuilderUtils.teiElement; import static org.grobid.core.document.xml.XmlBuilderUtils.addXmlId; import static org.grobid.core.document.xml.XmlBuilderUtils.textNode; /** * Class for representing a table. 
* */ public class Table extends Figure { private List<LayoutToken> contentTokens = new ArrayList<>(); private List<LayoutToken> fullDescriptionTokens = new ArrayList<>(); private boolean goodTable = true; private StringBuilder note = null; private List<LayoutToken> noteLayoutTokens = null; private String labeledNote = null; public void setGoodTable(boolean goodTable) { this.goodTable = goodTable; } public Table() { caption = new StringBuilder(); header = new StringBuilder(); content = new StringBuilder(); label = new StringBuilder(); note = new StringBuilder(); } @Override public String toTEI(GrobidAnalysisConfig config, Document doc, TEIFormatter formatter, List<MarkerType> markerTypes) { if (StringUtils.isEmpty(header) && StringUtils.isEmpty(caption)) { return null; } Element tableElement = XmlBuilderUtils.teiElement("figure"); tableElement.addAttribute(new Attribute("type", "table")); if (id != null) { XmlBuilderUtils.addXmlId(tableElement, "tab_" + id); } // this is non TEI, to be reviewed //tableElement.addAttribute(new Attribute("validated", String.valueOf(isGoodTable()))); if ((config.getGenerateTeiCoordinates() != null) && (config.getGenerateTeiCoordinates().contains("figure"))) { XmlBuilderUtils.addCoords(tableElement, LayoutTokensUtil.getCoordsStringForOneBox(getLayoutTokens())); } Element headEl = XmlBuilderUtils.teiElement("head", LayoutTokensUtil.normalizeText(header.toString())); Element labelEl = XmlBuilderUtils.teiElement("label", LayoutTokensUtil.normalizeText(label.toString())); /*Element descEl = XmlBuilderUtils.teiElement("figDesc"); descEl.appendChild(LayoutTokensUtil.normalizeText(caption.toString()).trim()); if ((config.getGenerateTeiCoordinates() != null) && (config.getGenerateTeiCoordinates().contains("figure"))) { XmlBuilderUtils.addCoords(descEl, LayoutTokensUtil.getCoordsString(getFullDescriptionTokens())); }*/ Element desc = null; if (caption != null) { // if the segment has been parsed with the full text model we further extract the 
clusters // to get the bibliographical references desc = XmlBuilderUtils.teiElement("figDesc"); if (config.isGenerateTeiIds()) { String divID = KeyGen.getKey().substring(0, 7); addXmlId(desc, "_" + divID); } if ( (labeledCaption != null) && (labeledCaption.length() > 0) ) { TaggingTokenClusteror clusteror = new TaggingTokenClusteror(GrobidModels.FULLTEXT, labeledCaption, captionLayoutTokens); List<TaggingTokenCluster> clusters = clusteror.cluster(); for (TaggingTokenCluster cluster : clusters) { if (cluster == null) { continue; } MarkerType citationMarkerType = null; if (markerTypes != null && markerTypes.size()>0) { citationMarkerType = markerTypes.get(0); } TaggingLabel clusterLabel = cluster.getTaggingLabel(); //String clusterContent = LayoutTokensUtil.normalizeText(cluster.concatTokens()); String clusterContent = LayoutTokensUtil.normalizeDehyphenizeText(cluster.concatTokens()); if (clusterLabel.equals(TaggingLabels.CITATION_MARKER)) { try { List<Node> refNodes = formatter.markReferencesTEILuceneBased( cluster.concatTokens(), doc.getReferenceMarkerMatcher(), config.isGenerateTeiCoordinates("ref"), false, citationMarkerType); if (refNodes != null) { for (Node n : refNodes) { desc.appendChild(n); } } } catch(Exception e) { LOGGER.warn("Problem when serializing TEI fragment for table caption", e); } } else { desc.appendChild(textNode(clusterContent)); } if (desc != null && config.isWithSentenceSegmentation()) { formatter.segmentIntoSentences(desc, this.captionLayoutTokens, config, doc.getLanguage()); // we need a sentence segmentation of the table caption, for that we need to introduce // a <div>, then a <p> desc.setLocalName("p"); Element div = XmlBuilderUtils.teiElement("div"); div.appendChild(desc); Element figDesc = XmlBuilderUtils.teiElement("figDesc"); figDesc.appendChild(div); desc = figDesc; } } } else { desc.appendChild(LayoutTokensUtil.normalizeText(caption.toString()).trim()); } } Element contentEl = XmlBuilderUtils.teiElement("table"); 
processTableContent(contentEl, this.getContentTokens()); if ((config.getGenerateTeiCoordinates() != null) && (config.getGenerateTeiCoordinates().contains("figure"))) { XmlBuilderUtils.addCoords(contentEl, LayoutTokensUtil.getCoordsStringForOneBox(getContentTokens())); } Element noteNode = null; if (note != null && note.toString().trim().length()>0) { noteNode = XmlBuilderUtils.teiElement("note"); if (config.isGenerateTeiIds()) { String divID = KeyGen.getKey().substring(0, 7); addXmlId(noteNode, "_" + divID); } if ( (labeledNote != null) && (labeledNote.length() > 0) ) { TaggingTokenClusteror clusteror = new TaggingTokenClusteror(GrobidModels.FULLTEXT, labeledNote, noteLayoutTokens); List<TaggingTokenCluster> clusters = clusteror.cluster(); for (TaggingTokenCluster cluster : clusters) { if (cluster == null) { continue; } MarkerType citationMarkerType = null; if (markerTypes != null && markerTypes.size()>0) { citationMarkerType = markerTypes.get(0); } TaggingLabel clusterLabel = cluster.getTaggingLabel(); //String clusterContent = LayoutTokensUtil.normalizeText(cluster.concatTokens()); String clusterContent = LayoutTokensUtil.normalizeDehyphenizeText(cluster.concatTokens()); if (clusterLabel.equals(TaggingLabels.CITATION_MARKER)) { try { List<Node> refNodes = formatter.markReferencesTEILuceneBased( cluster.concatTokens(), doc.getReferenceMarkerMatcher(), config.isGenerateTeiCoordinates("ref"), false, citationMarkerType); if (refNodes != null) { for (Node n : refNodes) { noteNode.appendChild(n); } } } catch(Exception e) { LOGGER.warn("Problem when serializing TEI fragment for table note", e); } } else { noteNode.appendChild(textNode(clusterContent)); } if (noteNode != null && config.isWithSentenceSegmentation()) { // we need a sentence segmentation of the figure caption formatter.segmentIntoSentences(noteNode, this.noteLayoutTokens, config, doc.getLanguage()); } // enclose note content in a <p> element if (noteNode != null) { noteNode.setLocalName("p"); Element 
tabNote = XmlBuilderUtils.teiElement("note"); tabNote.appendChild(noteNode); noteNode = tabNote; } } } else { noteNode = XmlBuilderUtils.teiElement("note", LayoutTokensUtil.normalizeText(note.toString()).trim()); } String coords = null; if (config.isGenerateTeiCoordinates("note")) { coords = LayoutTokensUtil.getCoordsString(noteLayoutTokens); } if (coords != null) { noteNode.addAttribute(new Attribute("coords", coords)); } } tableElement.appendChild(headEl); tableElement.appendChild(labelEl); if (desc != null) tableElement.appendChild(desc); tableElement.appendChild(contentEl); if (noteNode != null) tableElement.appendChild(noteNode); return tableElement.toXML(); } /** * * @param contentEl table element to append parsed rows and cells. * @param contentTokens tokens that are used to build cells * Line-based algorithm for parsing tables, uses tokens' coordinates to identify lines */ void processTableContent(Element contentEl, List<LayoutToken> contentTokens) { // Join Layout Tokens into cell lines originally created by PDFAlto List<LinePart> lineParts = Line.extractLineParts(contentTokens); // Build lines by comparing borders List<Line> lines = Line.extractLines(lineParts); // Build rows and cells List<Row> rows = Row.extractRows(lines); int columnCount = Row.columnCount(rows); Row.insertEmptyCells(rows, columnCount); Row.mergeMulticolumnCells(rows); for (Row row: rows) { Element tr = XmlBuilderUtils.teiElement("row"); contentEl.appendChild(tr); List<Cell> cells = row.getContent(); for (Cell cell: cells) { Element td = XmlBuilderUtils.teiElement("cell"); tr.appendChild(td); if (cell.getColspan() > 1) { td.addAttribute(new Attribute("cols", Integer.toString(cell.getColspan()))); } td.appendChild(cell.getText().trim()); } } } private String cleanString(String input) { return input.replace("\n", " ").replace(" ", " ").trim(); } public String getNote() { return note.toString(); } public void setNote(StringBuilder note) { this.note = note; } public void appendNote(String 
noteChunk) { note.append(noteChunk); } // if an extracted table passes some validations rules public boolean firstCheck() { goodTable = goodTable && validateTable(); return goodTable; } public boolean secondCheck() { goodTable = goodTable && !badTableAdvancedCheck(); return goodTable; } public List<LayoutToken> getNoteLayoutTokens() { return noteLayoutTokens; } public void setNoteLayoutTokens(List<LayoutToken> tokens) { this.noteLayoutTokens = tokens; } public void addNoteLayoutToken(LayoutToken token) { if (this.noteLayoutTokens == null) this.noteLayoutTokens = new ArrayList<LayoutToken>(); noteLayoutTokens.add(token); } public void addAllNoteLayoutTokens(List<LayoutToken> tokens) { if (this.noteLayoutTokens == null) this.noteLayoutTokens = new ArrayList<LayoutToken>(); noteLayoutTokens.addAll(tokens); } public void setLabeledNote(String labeledNote) { this.labeledNote = labeledNote; } public String getLabeledNote() { return this.labeledNote; } private boolean validateTable() { CntManager cnt = Engine.getCntManager(); if (StringUtils.isEmpty(label) || StringUtils.isEmpty(header) || StringUtils.isEmpty(content)) { cnt.i(TableRejectionCounters.EMPTY_LABEL_OR_HEADER_OR_CONTENT); return false; } try { Integer.valueOf(getLabel().trim(), 10); } catch (NumberFormatException e) { cnt.i(TableRejectionCounters.CANNOT_PARSE_LABEL_TO_INT); return false; } if (!getHeader().toLowerCase().startsWith("table")) { cnt.i(TableRejectionCounters.HEADER_NOT_STARTS_WITH_TABLE_WORD); return false; } return true; } private boolean badTableAdvancedCheck() { CntManager cnt = Engine.getCntManager(); BoundingBox contentBox = BoundingBoxCalculator.calculateOneBox(contentTokens, true); BoundingBox descBox = BoundingBoxCalculator.calculateOneBox(fullDescriptionTokens, true); if (contentBox.getPage() != descBox.getPage()) { cnt.i(TableRejectionCounters.HEADER_AND_CONTENT_DIFFERENT_PAGES); return true; } if (contentBox.intersect(descBox)) { 
cnt.i(TableRejectionCounters.HEADER_AND_CONTENT_INTERSECT); return true; } if (descBox.area() > contentBox.area()) { cnt.i(TableRejectionCounters.HEADER_AREA_BIGGER_THAN_CONTENT); return true; } if (contentBox.getHeight() < 40) { cnt.i(TableRejectionCounters.CONTENT_SIZE_TOO_SMALL); return true; } if (contentBox.getWidth() < 100) { cnt.i(TableRejectionCounters.CONTENT_WIDTH_TOO_SMALL); return true; } if (contentTokens.size() < 10) { cnt.i(TableRejectionCounters.FEW_TOKENS_IN_CONTENT); return true; } if (fullDescriptionTokens.size() < 5) { cnt.i(TableRejectionCounters.FEW_TOKENS_IN_HEADER); return true; } return false; } public List<LayoutToken> getContentTokens() { return contentTokens; } public List<LayoutToken> getFullDescriptionTokens() { return fullDescriptionTokens; } public boolean isGoodTable() { return goodTable; } public String getTeiId() { return "tab_" + this.id; } }
15,762
36.002347
136
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/DataSetContext.java
package org.grobid.core.data; /** * Representing the context of a reference (to biblio/formula/table/figure) */ public class DataSetContext { public String context; private String documentCoords; private String teiId; public String getContext() { return context; } public void setContext(String context) { this.context = context; } public String getDocumentCoords() { return documentCoords; } public void setDocumentCoords(String documentCoords) { this.documentCoords = documentCoords; } public String getTeiId() { return teiId; } public void setTeiId(String teiId) { this.teiId = teiId; } }
709
19.285714
75
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/Person.java
package org.grobid.core.data; import org.apache.commons.lang3.StringUtils; import nu.xom.Attribute; import nu.xom.Element; import org.grobid.core.document.xml.XmlBuilderUtils; import org.grobid.core.layout.LayoutToken; import org.grobid.core.utilities.LayoutTokensUtil; import org.grobid.core.utilities.TextUtilities; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; /** * Class for representing and exchanging person information, e.g. author or editor. * */ public class Person { private String firstName = null; private String middleName = null; private String lastName = null; private String title = null; private String suffix = null; private String rawName = null; // raw full name if relevant/available, e.g. name exactly as displayed private String orcid = null; private boolean corresp = false; private List<LayoutToken> layoutTokens = new ArrayList<>(); private List<String> affiliationBlocks = null; private List<Affiliation> affiliations = null; private List<String> affiliationMarkers = null; private List<String> markers = null; private String email = null; public String getFirstName() { return firstName; } public void setFirstName(String f) { firstName = f; } public String getMiddleName() { return middleName; } public void setMiddleName(String f) { middleName = f; } public String getLastName() { return lastName; } public void setLastName(String f) { lastName = f; } public String getRawName() { return rawName; } public void setRawName(String name) { rawName = name; } public String getTitle() { return title; } public void setTitle(String f) { if (f != null) { while (f.startsWith("(")) { f = f.substring(1,f.length()); } while (f.endsWith(")")) { f = f.substring(0,f.length()-1); } } title = f; } public String getSuffix() { return suffix; } public void setSuffix(String s) { suffix = s; } public boolean getCorresp() { return corresp; } public void setCorresp(boolean b) { corresp = b; } public String getORCID() { return 
orcid; } public void setORCID(String id) { if (id == null) return; if (id.startsWith("http://orcid.org/")) id = id.replace("http://orcid.org/", ""); else if (id.startsWith("https://orcid.org/")) id = id.replace("https://orcid.org/", ""); orcid = id; } public List<String> getAffiliationBlocks() { return affiliationBlocks; } public void setAffiliationBlocks(List<String> blocks) { this.affiliationBlocks = blocks; } public void addAffiliationBlocks(String f) { if (affiliationBlocks == null) affiliationBlocks = new ArrayList<String>(); affiliationBlocks.add(f); } public List<org.grobid.core.data.Affiliation> getAffiliations() { return affiliations; } public void addAffiliation(org.grobid.core.data.Affiliation f) { if (affiliations == null) affiliations = new ArrayList<>(); affiliations.add(f); } public List<String> getAffiliationMarkers() { return affiliationMarkers; } public void setAffiliationMarkers(List<String> affiliationMarkers) { this.affiliationMarkers = affiliationMarkers; } public void addAffiliationMarker(String s) { if (affiliationMarkers == null) affiliationMarkers = new ArrayList<String>(); affiliationMarkers.add(s); } public void setAffiliations(List<org.grobid.core.data.Affiliation> f) { affiliations = f; } public List<String> getMarkers() { return markers; } public void setMarkers(List<String> markers) { this.markers = markers; } public void addMarker(String f) { if (markers == null) markers = new ArrayList<String>(); f = f.replace(" ", ""); markers.add(f); } public String getEmail() { return email; } public void setEmail(String f) { email = f; } public boolean notNull() { if ((firstName == null) && (middleName == null) && (lastName == null) && (title == null) ) return false; else return true; } /** * Create a new instance of Person object from current instance (shallow copy) */ public Person clonePerson() { Person person = new Person(); person.firstName = this.firstName ; person.middleName = this.middleName; person.lastName = this.lastName; 
person.title = this.title; person.suffix = this.suffix; person.rawName = this.rawName; person.orcid = this.orcid; person.corresp = this.corresp; person.email = this.email; if (this.layoutTokens != null) person.layoutTokens = new ArrayList<>(this.layoutTokens); if (this.affiliationBlocks != null) person.affiliationBlocks = new ArrayList<>(this.affiliationBlocks); if (this.affiliations != null) person.affiliations = new ArrayList<>(this.affiliations); if (this.affiliationMarkers != null) person.affiliationMarkers = new ArrayList<>(this.affiliationMarkers); if (this.markers != null) person.markers = new ArrayList<>(this.markers); return person; } public String toString() { String res = ""; if (title != null) res += title + " "; if (firstName != null) res += firstName + " "; if (middleName != null) res += middleName + " "; if (lastName != null) res += lastName + " "; if (suffix != null) res += suffix; if (email != null) { res += " (email:" + email + ")"; } if (orcid != null) { res += " (orcid:" + orcid + ")"; } if (affiliations != null) { for(Affiliation aff : affiliations) { res += " (affiliation: " + aff.toString() + ") "; } } return res.trim(); } public List<LayoutToken> getLayoutTokens() { return layoutTokens; } public void setLayoutTokens(List<LayoutToken> tokens) { this.layoutTokens = tokens; } /** * TEI serialization via xom. 
*/ public void addLayoutTokens(List<LayoutToken> theTokens) { if (layoutTokens == null) { layoutTokens = new ArrayList<LayoutToken>(); } layoutTokens.addAll(theTokens); } public String toTEI(boolean withCoordinates) { if ( (firstName == null) && (middleName == null) && (lastName == null) ) { return null; } Element persElement = XmlBuilderUtils.teiElement("persName"); if (withCoordinates && (getLayoutTokens() != null) && (!getLayoutTokens().isEmpty())) { XmlBuilderUtils.addCoords(persElement, LayoutTokensUtil.getCoordsString(getLayoutTokens())); } if (title != null) { persElement.appendChild(XmlBuilderUtils.teiElement("roleName", title)); } if (firstName != null) { Element forename = XmlBuilderUtils.teiElement("forename", firstName); forename.addAttribute(new Attribute("type", "first")); persElement.appendChild(forename); } if (middleName != null) { Element mn = XmlBuilderUtils.teiElement("forename", middleName); mn.addAttribute(new Attribute("type", "middle")); persElement.appendChild(mn); } if (lastName != null) { persElement.appendChild(XmlBuilderUtils.teiElement("surname", lastName)); } if (suffix != null) { persElement.appendChild(XmlBuilderUtils.teiElement("genName", suffix)); } return XmlBuilderUtils.toXml(persElement); } /** * TEI serialization based on string builder, it allows to avoid namespaces and to better control * the formatting. 
*/ public String toTEI(boolean withCoordinates, int indent) { if ( (firstName == null) && (middleName == null) && (lastName == null) ) { return null; } StringBuilder tei = new StringBuilder(); for (int i = 0; i < indent; i++) { tei.append("\t"); } tei.append("<persName"); if (withCoordinates && (getLayoutTokens() != null) && (!getLayoutTokens().isEmpty())) { tei.append(" "); tei.append(LayoutTokensUtil.getCoordsString(getLayoutTokens())); } tei.append(">\n"); if (!StringUtils.isEmpty(title)) { for (int i = 0; i < indent+1; i++) { tei.append("\t"); } tei.append("<roleName>"+TextUtilities.HTMLEncode(title)+"</roleName>\n"); } if (!StringUtils.isEmpty(firstName)) { for (int i = 0; i < indent+1; i++) { tei.append("\t"); } tei.append("<forename type=\"first\">"+TextUtilities.HTMLEncode(firstName)+"</forename>\n"); } if (!StringUtils.isEmpty(middleName)) { for (int i = 0; i < indent+1; i++) { tei.append("\t"); } tei.append("<forename type=\"middle\">"+TextUtilities.HTMLEncode(middleName)+"</forename>\n"); } if (!StringUtils.isEmpty(lastName)) { for (int i = 0; i < indent+1; i++) { tei.append("\t"); } tei.append("<surname>"+TextUtilities.HTMLEncode(lastName)+"</surname>\n"); } if (!StringUtils.isEmpty(suffix)) { for (int i = 0; i < indent+1; i++) { tei.append("\t"); } tei.append("<genName>"+TextUtilities.HTMLEncode(suffix)+"</genName>\n"); } for (int i = 0; i < indent; i++) { tei.append("\t"); } tei.append("</persName>"); return tei.toString(); } // list of character delimiters for capitalising names private static final String NAME_DELIMITERS = "-.,;:/_ "; /*static public String normalizeName(String inputName) { return TextUtilities.capitalizeFully(inputName, NAME_DELIMITERS); }*/ /** * This normalisation takes care of uniform case for name components and for * transforming agglutinated initials (like "JM" in JM Smith) * which are put into the firstname into separate initials in first and middle names. 
* */ public void normalizeName() { if (StringUtils.isEmpty(middleName) && !StringUtils.isEmpty(firstName) && (firstName.length() == 2) && (TextUtilities.isAllUpperCase(firstName)) ) { middleName = firstName.substring(1,2); firstName = firstName.substring(0,1); } firstName = TextUtilities.capitalizeFully(firstName, NAME_DELIMITERS); middleName = TextUtilities.capitalizeFully(middleName, NAME_DELIMITERS); lastName = TextUtilities.capitalizeFully(lastName, NAME_DELIMITERS); } // assume never more than 3 initials //private Pattern initials = Pattern.compile("([A-Z])(?:\\.)\\s?(?:([A-Z])(?:\\.))?\\s?(?:([A-Z])(?:\\.))?"); /** * First names coming from CrossRef are clearly heavily impacted by the original puslisher * formats and a large variety of forms can be seen, with some information lost apparently. */ public void normalizeCrossRefFirstName() { // first name can be initial with a dot, e.g. "M." or without a dot // <forename type="first">H</forename> // fistname can be intials with appended middlename also as initials, // with or without space, e.g. "M. L." or // <forename type="first">L.S.</forename> // normal full first name can be appended with middlename initials with dots but // no space e.g. "Nicholas J.", "John W.S." // we have sldo destructive case normalization done at CrossRef or by publishers // like "Zs. Biró" String first = null; String middle = null; /*Matcher m = initials.matcher(firstName); while(m.find()) { count++; System.out.println("Match number "+count); System.out.println("start(): "+m.start()); System.out.println("end(): "+m.end()); if (count != 0) { } }*/ firstName = firstName.replace(".", ". "); firstName = StringUtils.normalizeSpace(firstName); // check first the specific case "Zs. Biró" - given the we've never observed three // letters first name like "Zsv. 
Biró" if ( firstName.endsWith(".") && (firstName.length() == 3) && Character.isUpperCase(firstName.charAt(0)) && Character.isLowerCase(firstName.charAt(1)) ) { middleName = firstName.substring(1,2); firstName = firstName.substring(0,1); } // check the specific case of composed forenames which are often but not always lost // ex: "J.-L. Arsuag" if ( (firstName.indexOf("-") != -1) ) { String tokens[] = firstName.replace(" ", "").split("-"); if (tokens.length == 2) { if (tokens[0].endsWith(".") && (tokens[0].length() == 2)) first = ""+tokens[0].charAt(0); else if (tokens[0].length() == 1) first = tokens[0]; if (tokens[1].endsWith(".") && (tokens[1].length() == 2)) first += "-" + tokens[1].charAt(0); else if (tokens[1].length() == 1) first += "-" + tokens[1]; } } else { String tokens[] = firstName.split(" "); for(int i=tokens.length-1; i>=0; i--) { if (i != 0) { if (first != null) { if (tokens[i].endsWith(".") && (tokens[i].length() == 2)) { // (case "G. Arjen") first = tokens[i].charAt(0) + " " + first; } else { // multiple token first name first = tokens[i] + " " + first; } } else if ( (tokens[i].endsWith(".") && (tokens[i].length() == 2)) || (tokens[i].length() == 1) ) { // we have an initials in secondary position, this is a middle name if (middle == null) middle = ""+tokens[i].charAt(0); else middle = tokens[i].charAt(0) + " " + middle; } else { if (middle == null) middle = tokens[i]; else middle = tokens[i] + " " + middle; } } else { // we check if we have an initial at the beginning (case "G. 
Arjen") if (tokens[i].endsWith(".") && (tokens[i].length() == 2)) { if (first == null) first = ""+tokens[i].charAt(0); else first = tokens[i] + " " + first; } else { if (first == null) first = tokens[i]; else first = tokens[i] + " " + first; } } } } if (first != null) firstName = first; if (middle != null) middleName = middle; // dirty case <forename type="first">HermanHG</forename><surname>Teerink</surname> if ( (firstName != null) && (middleName == null) && (firstName.length()>2) && Character.isUpperCase(firstName.charAt(firstName.length()-1)) && Character.isLowerCase(firstName.charAt(1)) ) { int i = firstName.length()-1; while(i>1) { if (Character.isUpperCase(firstName.charAt(i))) { if (middleName == null) middleName = ""+firstName.charAt(i); else middleName = firstName.charAt(i) + " " + middleName; } else break; i--; } firstName = firstName.substring(0, i+1); } // for cases like JM Smith and for case normalisation normalizeName(); // cleaning for CrossRef middlenames if (middleName != null) { middleName = middleName.replace(".", ". "); middleName = middleName.replace(" ", " "); } // other weird stuff: <forename type="first">G. Arjen</forename><surname>de Groot</surname> // also note that language specific case practice are usually not expected // e.g. H Von Allmen, J De } /** * Return true if the person structure is a valid person name, in our case * with at least a lastname or a raw name. 
*/ public boolean isValid() { if ( (lastName == null) && (rawName == null) ) return false; else return true; } /** * Deduplicate person names, optionally attached to affiliations, based * on common forename/surname, taking into account abbreviated forms */ public static List<Person> deduplicate(List<Person> persons) { if (persons == null) return null; if (persons.size() == 0) return persons; // we create a signature per person based on lastname and first name first letter Map<String,List<Person>> signatures = new TreeMap<String,List<Person>>(); for(Person person : persons) { if (person.getLastName() == null || person.getLastName().trim().length() == 0) { // the minimal information to deduplicate is not available continue; } String signature = person.getLastName().toLowerCase(); if (person.getFirstName() != null && person.getFirstName().trim().length() != 0) { signature += "_" + person.getFirstName().substring(0,1); } List<Person> localPersons = signatures.get(signature); if (localPersons == null) { localPersons = new ArrayList<Person>(); } localPersons.add(person); signatures.put(signature, localPersons); } // match signature and check possible affiliation information for (Map.Entry<String,List<Person>> entry : signatures.entrySet()) { List<Person> localPersons = entry.getValue(); if (localPersons.size() > 1) { // candidate for deduplication, check full forenames and middlenames to check if there is a clash List<Person> newLocalPersons = new ArrayList<Person>(); for(int j=0; j < localPersons.size(); j++) { Person localPerson = localPersons.get(j); String localFirstName = localPerson.getFirstName(); if (localFirstName != null) { localFirstName = localFirstName.toLowerCase(); localFirstName = localFirstName.replaceAll("[\\-\\.]", ""); } String localMiddleName = localPerson.getMiddleName(); if (localMiddleName != null) { localMiddleName = localMiddleName.toLowerCase(); localMiddleName = localMiddleName.replaceAll("[\\-\\.]", ""); } int nbClash = 0; for(int k=0; k < 
localPersons.size(); k++) { boolean clash = false; if (k == j) continue; Person otherPerson = localPersons.get(k); String otherFirstName = otherPerson.getFirstName(); if (otherFirstName != null) { otherFirstName = otherFirstName.toLowerCase(); otherFirstName = otherFirstName.replaceAll("[\\-\\.]", ""); } String otherMiddleName = otherPerson.getMiddleName(); if (otherMiddleName != null) { otherMiddleName = otherMiddleName.toLowerCase(); otherMiddleName = otherMiddleName.replaceAll("[\\-\\.]", ""); } // test first name clash if (localFirstName != null && otherFirstName != null) { if (localFirstName.length() == 1 && otherFirstName.length() == 1) { if (!localFirstName.equals(otherFirstName)) { clash = true; } } else { if (!localFirstName.equals(otherFirstName) && !localFirstName.startsWith(otherFirstName) && !otherFirstName.startsWith(localFirstName) ) { clash = true; } } } // test middle name clash if (!clash) { if (localMiddleName != null && otherMiddleName != null) { if (localMiddleName.length() == 1 && otherMiddleName.length() == 1) { if (!localMiddleName.equals(otherMiddleName)) { clash = true; } } else { if (!localMiddleName.equals(otherMiddleName) && !localMiddleName.startsWith(otherMiddleName) && !otherMiddleName.startsWith(localMiddleName) ) { clash = true; } } } } if (clash) { // increase the clash number for index j nbClash++; } } if (nbClash == 0) { newLocalPersons.add(localPerson); } } localPersons = newLocalPersons; if (localPersons.size() > 1) { // if identified duplication, keep the most complete person form and the most complete // affiliation information Person localPerson = localPersons.get(0); String localFirstName = localPerson.getFirstName(); if (localFirstName != null) localFirstName = localFirstName.toLowerCase(); String localMiddleName = localPerson.getMiddleName(); if (localMiddleName != null) localMiddleName = localMiddleName.toLowerCase(); String localTitle = localPerson.getTitle(); if (localTitle != null) localTitle = 
localTitle.toLowerCase(); String localSuffix = localPerson.getSuffix(); if (localSuffix != null) localSuffix = localSuffix.toLowerCase(); List<Affiliation> aff = localPerson.getAffiliations(); for (int i=1; i<localPersons.size(); i++) { Person otherPerson = localPersons.get(i); // try to enrich first Person object String otherFirstName = otherPerson.getFirstName(); if (otherFirstName != null) otherFirstName = otherFirstName.toLowerCase(); String otherMiddleName = otherPerson.getMiddleName(); if (otherMiddleName != null) otherMiddleName = otherMiddleName.toLowerCase(); String otherTitle = otherPerson.getTitle(); if (otherTitle != null) otherTitle = otherTitle.toLowerCase(); String otherSuffix = otherPerson.getSuffix(); if (otherSuffix != null) otherSuffix = otherSuffix.toLowerCase(); if ((localFirstName == null && otherFirstName != null) || (localFirstName != null && otherFirstName != null && otherFirstName.length() > localFirstName.length())) { localPerson.setFirstName(otherPerson.getFirstName()); localFirstName = localPerson.getFirstName().toLowerCase(); } if ((localMiddleName == null && otherMiddleName != null) || (localMiddleName != null && otherMiddleName != null && otherMiddleName.length() > localMiddleName.length())) { localPerson.setMiddleName(otherPerson.getMiddleName()); localMiddleName = localPerson.getMiddleName().toLowerCase(); } if ((localTitle == null && otherTitle != null) || (localTitle != null && otherTitle != null && otherTitle.length() > localTitle.length())) { localPerson.setTitle(otherPerson.getTitle()); localTitle = localPerson.getTitle().toLowerCase(); } if ((localSuffix == null && otherSuffix != null) || (localSuffix != null && otherSuffix != null && otherSuffix.length() > localSuffix.length())) { localPerson.setSuffix(otherPerson.getSuffix()); localSuffix = localPerson.getSuffix().toLowerCase(); } String otherOrcid = otherPerson.getORCID(); if (otherOrcid != null) localPerson.setORCID(otherOrcid); if (otherPerson.getAffiliations() != null) 
{ for(Affiliation affOther : otherPerson.getAffiliations()) { localPerson.addAffiliation(affOther); } } if (otherPerson.getAffiliationBlocks() != null) { for(String block : otherPerson.getAffiliationBlocks()) { localPerson.addAffiliationBlocks(block); } } if (otherPerson.getMarkers() != null) { for(String marker : otherPerson.getMarkers()) { if (localPerson.getMarkers() == null || !localPerson.getMarkers().contains(marker)) localPerson.addMarker(marker); } } if (localPerson.getEmail() == null) localPerson.setEmail(otherPerson.getEmail()); if (persons.contains(otherPerson)) persons.remove(otherPerson); } } } } return persons; } /** * Remove invalid/impossible person names (no last names, noise, etc.) */ public static List<Person> sanityCheck(List<Person> persons) { if (persons == null) return null; if (persons.size() == 0) return persons; List<Person> result = new ArrayList<Person>(); for(Person person : persons) { if (person.getLastName() == null || person.getLastName().trim().length() == 0) continue; else result.add(person); } return result; } }
29,269
36.719072
115
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/Classification.java
package org.grobid.core.data; import java.util.*; /** * Class for representing a classification. * */ public class Classification { private String classificationScheme = null; private List<String> classes = null; private String rawString = null; public String getClassificationScheme() { return classificationScheme; } public void setClassificationScheme(String s) { classificationScheme = s; } public List<String> getClasses() { return classes; } public void setClasses(List<String> c) { classes = c; } public String getRawString() { return rawString; } public void setRawString(String s) { rawString = s; } }
731
17.769231
51
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/ChemicalEntity.java
package org.grobid.core.data; import org.grobid.core.utilities.OffsetPosition; /** * Class for managing chemical entities. * */ public class ChemicalEntity { // attribute String rawName = null; String inchi = null; String smiles = null; OffsetPosition offsets = null; public ChemicalEntity() { offsets = new OffsetPosition(); } public ChemicalEntity(String raw) { offsets = new OffsetPosition(); this.rawName = raw; } public String getRawName() { return rawName; } public String getInchi() { return inchi; } public String getSmiles() { return smiles; } public void setRawName(String raw) { this.rawName = raw; } public void setInchi(String inchi) { this.inchi = inchi; } public void setSmiles(String smiles) { this.smiles = smiles; } public void setOffsetStart(int start) { offsets.start = start; } public int getOffsetStart() { return offsets.start; } public void setOffsetEnd(int end) { offsets.end = end; } public int getOffsetEnd() { return offsets.end; } public String toString() { StringBuffer buffer = new StringBuffer(); buffer.append(rawName + "\t" + inchi + "\t" + smiles + "\t" + offsets.toString()); return buffer.toString(); } // TODO: CML encoding }
1,437
18.432432
90
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/PriorArtCitation.java
package org.grobid.core.data; import java.util.*; /** * Class for managing citation of patent bibliographical references. * */ public class PriorArtCitation { // cited patent, null if not a patent private PatentItem patent = null; // cited nlp, null if not a npl private BiblioItem npl = null; private List<Passage> passages = null; private String category = null; private String comment = null; private String rawCitation = null; private String rawClaims = null; public PatentItem getPatent() { return patent; } public void setPatent(PatentItem item) { patent = item; } public BiblioItem getNPL() { return npl; } public void setNPL(BiblioItem item) { npl = item; } public List<Passage> getPassages() { return passages; } public void setPassages(List<Passage> pass) { passages = pass; } public String getCategory() { return category; } public void setCategory(String cat) { category = cat; } public String getComment() { return comment; } public void setComment(String comm) { comment = comm; } public String getRawCitation() { return rawCitation; } public void setRawCitation(String raw) { rawCitation = raw; } public String getRawClaims() { return rawClaims; } public void setRawClaims(String raw) { rawClaims = raw; } // TODO: TEI based encoding }
1,537
17.53012
68
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/BibDataSet.java
package org.grobid.core.data; import java.util.*; import org.grobid.core.engines.config.GrobidAnalysisConfig; /** * Structure for representing the different information for a citation and its different context of citation. * */ public class BibDataSet { public enum Counters { CITATIONS_CNT, CITATIONS_WITH_CONTEXT_CNT, CITATIONS_WITHOUT_CONTEXT_CNT } private BiblioItem resBib = null; // identified parsed bibliographical item private List<String> sourceBib = null; // the context window (raw text) where the bibliographical item is cited private String refSymbol = null; // reference marker in the text body private String rawBib = null; // raw text of the bibliographical item private double confidence = 1.0; // confidence score of the extracted bibiliographical item private List<Integer> offsets = null; // list of offsets corresponding to the position of the reference //private List<grisp.nlp.Term> terms = null; // set of terms describing the reference obtained in the citation context public BibDataSet() { } public void setResBib(BiblioItem res) { resBib = res; } public void addSourceBib(String sentence) { if (sourceBib == null) sourceBib = new ArrayList<String>(); //sourceBib.add(org.grobid.core.utilities.TextUtilities.HTMLEncode(sentence)); sourceBib.add(sentence); } public void setRawBib(String s) { //rawBib = org.grobid.core.utilities.TextUtilities.HTMLEncode(s); rawBib = s; } public void setRefSymbol(String s) { refSymbol = s; } //public void setTerms(List<grisp.nlp.Term> a) { terms = a; } public void setConfidence(double c) { confidence = c; } public BiblioItem getResBib() { return resBib; } public String getRawBib() { return rawBib; } public String getRefSymbol() { return refSymbol; } public List<String> getSourceBib() { return sourceBib; } //public List<grisp.nlp.Term> getTerms() { return terms; } public double getConfidence() { return confidence; } public void addOffset(int begin) { if (offsets == null) { offsets = new ArrayList<Integer>(); } 
offsets.add(begin); } public void addOffset(Integer begin) { if (offsets == null) { offsets = new ArrayList<Integer>(); } offsets.add(begin); } public List<Integer> getOffsets() { return offsets; } @Override public String toString() { return "BibDataSet [resBib=" + resBib.toString() + ", sourceBib=" + sourceBib + ", refSymbol=" + refSymbol + ", rawBib=" + rawBib + ", confidence=" + confidence + ", offsets=" + offsets + "]"; } public String toTEI() { return toTEI(false); } public String toTEI(boolean includeRawCitations) { if (resBib != null) { GrobidAnalysisConfig config = GrobidAnalysisConfig.builder() .includeRawCitations(includeRawCitations) .build(); return resBib.toTEI(-1, 0, config); } else { return ""; } } public String toTEI(int p) { return toTEI(p, false); } public String toTEI(int p, boolean includeRawCitations) { if (resBib != null) { GrobidAnalysisConfig config = GrobidAnalysisConfig.builder() .includeRawCitations(includeRawCitations) .build(); return resBib.toTEI(p, 0, config); } else { return ""; } } }
3,668
26.795455
109
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/PatentItem.java
package org.grobid.core.data;

import java.util.List;

import org.grobid.core.layout.BoundingBox;
import org.grobid.core.utilities.TextUtilities;

/**
 * Class for managing patent bibliographical references.
 */
public class PatentItem implements Comparable<PatentItem> {
    // attributes
    private String authority = null;
    private String number_epodoc = null;
    private String number_wysiwyg = null;
    private String kindCode = null;
    // patent type flags, when applicable
    private Boolean application = false;
    private Boolean provisional = false;
    private Boolean reissued = false;
    private Boolean plant = false;
    private Boolean design = false;
    private Boolean utility = false;

    // scores
    private double conf = 1.0;
    private String confidence = null;

    // position in document
    private int offset_begin = 0;
    private int offset_end = 0;
    // position in raw string (in case of factorised numbers)
    private int offset_raw = 0;

    // context of occurrence of the reference
    private String context = null;

    // coordinates in the original layout (for PDF)
    private List<BoundingBox> coordinates = null;

    public String getAuthority() {
        return authority;
    }

    public String getNumberEpoDoc() {
        return number_epodoc;
    }

    public String getNumberWysiwyg() {
        return number_wysiwyg;
    }

    public String getKindCode() {
        return kindCode;
    }

    public Boolean getApplication() {
        return application;
    }

    public Boolean getProvisional() {
        return provisional;
    }

    public Boolean getReissued() {
        return reissued;
    }

    public Boolean getPlant() {
        return plant;
    }

    public Boolean getDesign() {
        return design;
    }

    public Boolean getUtility() {
        // fixed: this getter previously returned the design flag (copy-paste bug)
        return utility;
    }

    public double getConf() {
        return conf;
    }

    public String getConfidence() {
        return confidence;
    }

    public int getOffsetBegin() {
        return offset_begin;
    }

    public int getOffsetEnd() {
        return offset_end;
    }

    public int getOffsetRaw() {
        return offset_raw;
    }

    /**
     * Context of occurrence of the reference.
     */
    public String getContext() {
        return context;
    }

    public void setContext(String cont) {
        context = cont;
    }

    public void setOffsetBegin(int ofs) {
        offset_begin = ofs;
    }

    public void setOffsetEnd(int ofs) {
        offset_end = ofs;
    }

    public void setOffsetRaw(int ofs) {
        offset_raw = ofs;
    }

    public void setKindCode(String kc) {
        kindCode = kc;
    }

    public void setNumberEpoDoc(String num) {
        number_epodoc = num;
    }

    public void setNumberWysiwyg(String num) {
        number_wysiwyg = num;
    }

    public void setAuthority(String s) {
        authority = s;
    }

    public void setApplication(boolean b) {
        application = b;
    }

    public void setProvisional(boolean b) {
        provisional = b;
    }

    public void setReissued(boolean b) {
        reissued = b;
    }

    public void setPlant(boolean b) {
        plant = b;
    }

    public void setDesign(boolean b) {
        design = b;
    }

    public void setUtility(boolean b) {
        utility = b;
    }

    public void setConf(double val) {
        conf = val;
    }

    /** Natural ordering by the "as printed" number; assumes number_wysiwyg is set. */
    public int compareTo(PatentItem another) {
        return number_wysiwyg.compareTo(another.getNumberWysiwyg());
    }

    private final static String espacenet = "http://v3.espacenet.com/publicationDetails/biblio?DB=EPODOC";
    private final static String espacenet2 = "http://v3.espacenet.com/searchResults?DB=EPODOC";
    private final static String epoline = "https://register.epoline.org/espacenet/application?number=";
    private final static String epoline2 = "https://register.epoline.org/espacenet/simpleSearch?index[0]=publication&value[0]=";
    private final static String epoline3 = "&index[1]=&value[1]=&index[2]=&value[2]=&searchMode=simple&recent=";

    /** Builds an Espacenet lookup URL matching the reference type. */
    public String getEspacenetURL() {
        String res = null;
        if (provisional) {
            res = espacenet2 + "&PR=" + authority + number_epodoc + "P";
        } else if (application) {
            res = espacenet2 + "&AP=" + authority + number_epodoc;
        } else {
            res = espacenet + "&CC=" + authority + "&NR=" + number_epodoc;
        }
        return res;
    }

    /** Builds an epoline register URL matching the reference type. */
    public String getEpolineURL() {
        String res = null;
        if (application) {
            res = epoline + authority + number_epodoc;
        } else {
            // we need the application number corresponding to the publication
            res = epoline2 + authority + number_epodoc + epoline3;
        }
        return res;
    }

    public String getType() {
        if (application)
            return "application";
        if (provisional)
            return "provisional";
        if (reissued)
            return "reissued";
        if (plant)
            return "plant";
        if (design)
            return "design";
        // default
        return "publication";
    }

    public void setType(String type) {
        if (type.equals("publication"))
            return;
        if (type.equals("application")) {
            application = true;
        } else if (type.equals("provisional")) {
            provisional = true;
        } else if (type.equals("reissued")) {
            reissued = true;
        } else if (type.equals("plant")) {
            plant = true;
        } else if (type.equals("design")) {
            design = true;
        }
    }

    @Override
    public String toString() {
        return "PatentItem [authority=" + authority + ", number_wysiwyg=" + number_wysiwyg
                + ", number_epodoc=" + number_epodoc + ", kindCode=" + kindCode
                + ", application=" + application + ", provisional=" + provisional
                + ", reissued=" + reissued + ", plant=" + plant + ", design=" + design
                + ", conf=" + conf + ", confidence=" + confidence
                + ", offset_begin=" + offset_begin + ", offset_end=" + offset_end
                + ", offset_raw=" + offset_raw + ", context=" + context + "]";
    }

    public String toTEI() {
        return toTEI(null, false, null);
    }

    public String toTEI(boolean withPtr, String ptrVal) {
        return toTEI(null, withPtr, ptrVal);
    }

    public String toTEI(String date) {
        return toTEI(date, false, null);
    }

    /**
     * TEI serialization of the patent reference.
     *
     * TEI for patent bibliographical data is as follows (after the TEI guideline
     * update of October 2012):
     * <biblStruct type="patent|utilityModel|designPatent|plant" status="application|publication">
     *   <monogr>
     *     <authority>
     *       <orgName type="national|regional">[name of patent office]</orgName> (mandatory)
     *     </authority>
     *     <idno type="docNumber">[patent document number]</idno> (mandatory)
     *     <imprint> (optional)
     *       <classCode scheme="kindCode">[kind code]</classCode> (optional)
     *       <date>[date]</date> (optional)
     *     </imprint>
     *   </monogr>
     * </biblStruct>
     *
     * @param date    optional publication date to include
     * @param withPtr whether to add a ptr element with a string-range target
     * @param ptrVal  the target document identifier used in the string-range
     */
    public String toTEI(String date, boolean withPtr, String ptrVal) {
        StringBuilder biblStruct = new StringBuilder();
        // type of patent
        biblStruct.append("<biblStruct type=\"");
        if (design) {
            biblStruct.append("designPatent");
        } else if (plant) {
            biblStruct.append("plant");
        } else if (utility) {
            biblStruct.append("utilityModel");
        } else {
            biblStruct.append("patent");
        }
        // status
        biblStruct.append("\" status=\"");
        if (application) {
            biblStruct.append("application");
        } else if (provisional) {
            biblStruct.append("provisional");
        } else if (reissued) {
            biblStruct.append("reissued");
        } else {
            biblStruct.append("publication");
        }
        biblStruct.append("\">");
        biblStruct.append("<monogr><authority><orgName type=\"");
        // XN is the Nordic Patent Institute
        // OA is the African Intellectual Property Organization (OAPI)
        // GC is the Gulf Cooperation Council
        // EA is the Eurasian Patent Organization
        // fixed: the second test used to repeat "XN" instead of checking "OA"
        if (authority.equals("EP") || authority.equals("WO") || authority.equals("XN") ||
                authority.equals("OA") || authority.equals("GC") || authority.equals("EA")) {
            biblStruct.append("regional");
        } else {
            biblStruct.append("national");
        }
        biblStruct.append("\">" + TextUtilities.HTMLEncode(authority) + "</orgName></authority>");
        biblStruct.append("<idno type=\"docNumber\" subtype=\"epodoc\">" + TextUtilities.HTMLEncode(number_epodoc) + "</idno>");
        biblStruct.append("<idno type=\"docNumber\" subtype=\"original\">" + TextUtilities.HTMLEncode(number_wysiwyg) + "</idno>");
        if ((kindCode != null) || (date != null)) {
            biblStruct.append("<imprint>");
            if (kindCode != null) {
                biblStruct.append("<classCode scheme=\"kindCode\">" + TextUtilities.HTMLEncode(kindCode) + "</classCode>");
            }
            if (date != null) {
                biblStruct.append("<date>" + TextUtilities.HTMLEncode(date) + "</date>");
            }
            biblStruct.append("</imprint>");
        }
        if (withPtr) {
            biblStruct.append("<ptr target=\"#string-range('" + ptrVal + "',"
                    + offset_begin + "," + (offset_end - offset_begin + 1) + ")\"></ptr>");
        }
        if (conf != 0.0) {
            biblStruct.append("<certainty degree=\"" + conf + "\" />");
        }
        biblStruct.append("</monogr>");
        biblStruct.append("</biblStruct>");
        return biblStruct.toString();
    }

    /**
     * JSON serialization of the patent reference.
     *
     * @param date            optional date to include
     * @param withCoordinates whether PDF bounding-box coordinates are included
     */
    public String toJson(String date, boolean withCoordinates) {
        StringBuilder json = new StringBuilder();
        json.append("{");
        json.append("\"type\": ");
        if (design) {
            json.append("\"designPatent\"");
        } else if (plant) {
            json.append("\"plant\"");
        } else if (utility) {
            json.append("\"utilityModel\"");
        } else {
            json.append("\"patent\"");
        }
        json.append(", \"status\": ");
        if (application) {
            json.append("\"application\"");
        } else if (provisional) {
            json.append("\"provisional\"");
        } else if (reissued) {
            json.append("\"reissued\"");
        } else {
            json.append("\"publication\"");
        }
        json.append(", \"authority\": { \"name\": \"").append(authority).append("\", \"type\": \"");
        // XN is the Nordic Patent Institute
        // OA is the African Intellectual Property Organization (OAPI)
        // GC is the Gulf Cooperation Council
        // EA is the Eurasian Patent Organization
        // fixed: the second test used to repeat "XN" instead of checking "OA"
        if (authority.equals("EP") || authority.equals("WO") || authority.equals("XN") ||
                authority.equals("OA") || authority.equals("GC") || authority.equals("EA")) {
            json.append("regional");
        } else {
            json.append("national");
        }
        json.append("\"}");
        json.append(", \"number\": {");
        if (number_wysiwyg != null) {
            json.append("\"original\" : \"").append(number_wysiwyg).append("\"");
            if (number_epodoc != null)
                json.append(", ");
        }
        if (number_epodoc != null)
            json.append("\"epodoc\" : \"").append(number_epodoc).append("\"");
        json.append("}");
        if (kindCode != null) {
            json.append(", \"kindCode\" : \"").append(kindCode).append("\"");
        }
        if (date != null) {
            json.append(", \"date\" : \"").append(date).append("\"");
        }
        if (withCoordinates && (coordinates != null) && (coordinates.size() > 0)) {
            json.append(", \"pos\": [");
            boolean first = true;
            for (BoundingBox b : coordinates) {
                if (first)
                    first = false;
                else
                    json.append(",");
                json.append("{").append(b.toJson()).append("}");
            }
            json.append("]");
        }
        if ((offset_begin != -1) && (offset_end != -1)) {
            json.append(", \"offset\": {");
            json.append("\"begin\" : ").append(offset_begin).append(", ");
            json.append("\"end\" : ").append(offset_end);
            json.append("}");
        }
        String url1 = getEspacenetURL();
        String url2 = null;
        if (authority.equals("EP"))
            url2 = getEpolineURL();
        if ((url1 != null) || (url2 != null)) {
            json.append(", \"url\": {");
            if (url1 != null) {
                json.append("\"espacenet\" : \"").append(url1).append("\"");
                if (url2 != null)
                    json.append(", ");
            }
            if (url2 != null)
                json.append("\"epoline\" : \"").append(url2).append("\"");
            json.append("}");
        }
        json.append("}");
        return json.toString();
    }

    public void setCoordinates(List<BoundingBox> coordinates) {
        this.coordinates = coordinates;
    }

    public List<BoundingBox> getCoordinates() {
        return coordinates;
    }
}
12,208
25.36933
128
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/Metadata.java
package org.grobid.core.data;

import java.util.List;

/**
 * Plain bean holding document metadata fields: title, subject, keywords,
 * author, creator, producer, and creation/modification dates.
 */
public class Metadata {

    private String title = null;
    private String subject = null;
    private String keywords = null;
    private String author = null;
    private String creator = null;
    private String producer = null;
    private String createDate = null;
    private String modificationDate = null;

    public String getTitle() { return title; }

    public void setTitle(String title) { this.title = title; }

    public String getSubject() { return subject; }

    public void setSubject(String subject) { this.subject = subject; }

    public String getKeywords() { return keywords; }

    public void setKeywords(String keywords) { this.keywords = keywords; }

    public String getAuthor() { return author; }

    public void setAuthor(String author) { this.author = author; }

    public String getCreator() { return creator; }

    public void setCreator(String creator) { this.creator = creator; }

    public String getProducer() { return producer; }

    public void setProducer(String producer) { this.producer = producer; }

    public String getCreateDate() { return createDate; }

    public void setCreateDate(String createDate) { this.createDate = createDate; }

    public String getModificationDate() { return modificationDate; }

    public void setModificationDate(String modificationDate) { this.modificationDate = modificationDate; }
}
1,622
19.544304
62
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/Figure.java
package org.grobid.core.data;

import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.base.Joiner;
import org.grobid.core.GrobidModels;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.grobid.core.document.xml.XmlBuilderUtils;
import org.grobid.core.document.Document;
import org.grobid.core.document.TEIFormatter;
import org.grobid.core.engines.config.GrobidAnalysisConfig;
import org.grobid.core.layout.BoundingBox;
import org.grobid.core.layout.GraphicObject;
import org.grobid.core.layout.GraphicObjectType;
import org.grobid.core.layout.LayoutToken;
import org.grobid.core.layout.VectorGraphicBoxCalculator;
import org.grobid.core.utilities.BoundingBoxCalculator;
import org.grobid.core.utilities.LayoutTokensUtil;
import org.grobid.core.utilities.TextUtilities;
import org.grobid.core.tokenization.TaggingTokenCluster;
import org.grobid.core.tokenization.TaggingTokenClusteror;
import org.grobid.core.utilities.KeyGen;
import org.grobid.core.engines.label.TaggingLabels;
import org.grobid.core.engines.label.TaggingLabel;
import org.grobid.core.engines.citations.CalloutAnalyzer.MarkerType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import nu.xom.Attribute;
import nu.xom.Element;
import nu.xom.Node;
import nu.xom.Text;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.SortedSet;
import java.util.Collections;

import static org.grobid.core.document.xml.XmlBuilderUtils.teiElement;
import static org.grobid.core.document.xml.XmlBuilderUtils.addXmlId;
import static org.grobid.core.document.xml.XmlBuilderUtils.textNode;

/**
 * Class for representing a figure.
 */
public class Figure {
    protected static final Logger LOGGER = LoggerFactory.getLogger(Figure.class);

    // selects bitmap graphic objects only
    public static final Predicate<GraphicObject> GRAPHIC_OBJECT_PREDICATE = new Predicate<GraphicObject>() {
        @Override
        public boolean apply(GraphicObject graphicObject) {
            return graphicObject.getType() == GraphicObjectType.BITMAP;
        }
    };

    // selects vector box graphic objects only
    public static final Predicate<GraphicObject> VECTOR_BOX_GRAPHIC_OBJECT_PREDICATE = new Predicate<GraphicObject>() {
        @Override
        public boolean apply(GraphicObject graphicObject) {
            return graphicObject.getType() == GraphicObjectType.VECTOR_BOX;
        }
    };

    // selects bitmap or vector box graphic objects
    public static final Predicate<GraphicObject> BOXED_GRAPHIC_OBJECT_PREDICATE = new Predicate<GraphicObject>() {
        @Override
        public boolean apply(GraphicObject graphicObject) {
            return graphicObject.getType() == GraphicObjectType.BITMAP ||
                    graphicObject.getType() == GraphicObjectType.VECTOR_BOX;
        }
    };

    protected StringBuilder caption = null;
    protected List<LayoutToken> captionLayoutTokens = new ArrayList<>();
    protected String labeledCaption = null;
    protected StringBuilder header = null;
    protected StringBuilder content = null;
    protected StringBuilder label = null;
    protected String id = null;
    protected URI uri = null;
    protected int start = -1; // start position in the full text tokenization
    protected int end = -1;   // end position in the full text tokenization
    protected LayoutToken startToken = null; // start layout token
    protected LayoutToken endToken = null;   // end layout token
    private List<BoundingBox> textArea;
    private List<LayoutToken> layoutTokens;

    // coordinates
    private int page = -1;
    private double y = 0.0;
    private double x = 0.0;
    private double width = 0.0;
    private double height = 0.0;

    // list of graphic objects corresponding to the figure
    protected List<GraphicObject> graphicObjects = null;
    private SortedSet<Integer> blockPtrs;

    public Figure() {
        caption = new StringBuilder();
        header = new StringBuilder();
        content = new StringBuilder();
        label = new StringBuilder();
    }

    public void appendHeader(String head) {
        header.append(head);
    }

    public String getHeader() {
        return header.toString();
    }

    public void appendCaption(String cap) {
        caption.append(cap);
    }

    public void appendCaptionLayoutTokens(List<LayoutToken> layoutTokens) {
        captionLayoutTokens.addAll(layoutTokens);
    }

    public String getCaption() {
        return caption.toString();
    }

    public List<LayoutToken> getCaptionLayoutTokens() {
        return this.captionLayoutTokens;
    }

    public void setCaptionLayoutTokens(List<LayoutToken> tokens) {
        this.captionLayoutTokens = tokens;
    }

    public void setLabeledCaption(String labeledCaption) {
        this.labeledCaption = labeledCaption;
    }

    public String getLabeledCaption() {
        return this.labeledCaption;
    }

    public void appendLabel(String lab) {
        label.append(lab);
    }

    public String getLabel() {
        return label.toString();
    }

    public void appendContent(String trash) {
        content.append(trash);
    }

    public String getContent() {
        return content.toString();
    }

    public void setURI(URI theURI) {
        uri = theURI;
    }

    public void setStart(int start) {
        this.start = start;
    }

    public int getStart() {
        return start;
    }

    public void setEnd(int end) {
        this.end = end;
    }

    public int getEnd() {
        return end;
    }

    public void setStartToken(LayoutToken start) {
        this.startToken = start;
    }

    public LayoutToken getStartToken() {
        return startToken;
    }

    public void setEndToken(LayoutToken end) {
        this.endToken = end;
    }

    public LayoutToken getEndToken() {
        return endToken;
    }

    /** Derives the identifier from the cleaned figure label. */
    public void setId() {
        id = TextUtilities.cleanField(label.toString(), false);
    }

    public void setId(String theId) {
        id = theId;
    }

    public String getId() {
        return id;
    }

    public List<GraphicObject> getGraphicObjects() {
        return graphicObjects;
    }

    /** Bitmap graphic objects of this figure, or null when there are none. */
    public List<GraphicObject> getBitmapGraphicObjects() {
        if (graphicObjects == null) {
            return null;
        }
        ArrayList<GraphicObject> graphicObjects =
                Lists.newArrayList(Iterables.filter(this.graphicObjects, GRAPHIC_OBJECT_PREDICATE));
        if (graphicObjects.isEmpty()) {
            return null;
        }
        return graphicObjects;
    }

    /** Bitmap or vector-box graphic objects of this figure, or null when there are none. */
    public List<GraphicObject> getBoxedGraphicObjects() {
        if (graphicObjects == null) {
            return null;
        }
        ArrayList<GraphicObject> graphicObjects =
                Lists.newArrayList(Iterables.filter(this.graphicObjects, BOXED_GRAPHIC_OBJECT_PREDICATE));
        if (graphicObjects.isEmpty()) {
            return null;
        }
        return graphicObjects;
    }

    /** Vector-box graphic objects of this figure, or null when there are none. */
    public List<GraphicObject> getVectorBoxGraphicObjects() {
        if (graphicObjects == null) {
            return null;
        }
        ArrayList<GraphicObject> graphicObjects =
                Lists.newArrayList(Iterables.filter(this.graphicObjects, VECTOR_BOX_GRAPHIC_OBJECT_PREDICATE));
        if (graphicObjects.isEmpty()) {
            return null;
        }
        return graphicObjects;
    }

    public void addGraphicObject(GraphicObject obj) {
        if (graphicObjects == null)
            graphicObjects = new ArrayList<GraphicObject>();
        graphicObjects.add(obj);
    }

    public void setGraphicObjects(List<GraphicObject> objs) {
        graphicObjects = objs;
    }

    /**
     * Simple block coordinates.
     */
    public String getCoordinatesString() {
        return String.format("%d,%.2f,%.2f,%.2f,%.2f", page, x, y, width, height);
    }

    /**
     * Proper bounding boxes: the single box bounding the figure's layout tokens,
     * merged with one box bounding all associated graphic objects.
     */
    public List<BoundingBox> getCoordinates() {
        List<BoundingBox> theBoxes = null;
        // non graphic elements
        if (getLayoutTokens() != null && getLayoutTokens().size() > 0) {
            BoundingBox oneBox = BoundingBoxCalculator.calculateOneBox(layoutTokens, true);
            theBoxes = new ArrayList<>();
            theBoxes.add(oneBox);
        }
        // here we bound all figure graphics in one single box (given that we can
        // have hundreds of graphics in a single figure); note: the bounding box
        // calculation with vector graphics might need some double check
        BoundingBox theGraphicsBox = null;
        if ((graphicObjects != null) && (graphicObjects.size() > 0)) {
            for (GraphicObject graphicObject : graphicObjects) {
                if (theGraphicsBox == null) {
                    theGraphicsBox = graphicObject.getBoundingBox();
                } else {
                    theGraphicsBox = theGraphicsBox.boundBoxExcludingAnotherPage(graphicObject.getBoundingBox());
                }
            }
        }
        if (theGraphicsBox != null) {
            if (theBoxes == null)
                theBoxes = new ArrayList<>();
            theBoxes.add(theGraphicsBox);
        }
        List<BoundingBox> result = new ArrayList<BoundingBox>();
        if (theBoxes != null && theBoxes.size() > 0) {
            // removed a dead recomputation of the token bounding box here: its
            // value was never used and it could NPE when the figure has graphics
            // but no layout tokens
            List<BoundingBox> mergedBox = VectorGraphicBoxCalculator.mergeBoxes(theBoxes);
            result.addAll(mergedBox);
        }
        Collections.sort(result);
        return result;
    }

    public String getTeiId() {
        return "fig_" + this.id;
    }

    /**
     * TEI serialization of the figure: coordinates (optional), head, label,
     * figDesc (with citation markers and optional sentence segmentation), and
     * one graphic element per graphic object.
     *
     * @return the TEI fragment, or null when the figure has no header, caption
     *         or graphic objects
     */
    public String toTEI(GrobidAnalysisConfig config, Document doc, TEIFormatter formatter, List<MarkerType> markerTypes) {
        if (StringUtils.isEmpty(header) && StringUtils.isEmpty(caption) && CollectionUtils.isEmpty(graphicObjects)) {
            return null;
        }
        Element figureElement = XmlBuilderUtils.teiElement("figure");
        if (id != null) {
            XmlBuilderUtils.addXmlId(figureElement, "fig_" + id);
        }

        if (config.isGenerateTeiCoordinates("figure")) {
            List<BoundingBox> theBoxes = null;
            // non graphic elements
            if (getLayoutTokens() != null && getLayoutTokens().size() > 0) {
                theBoxes = BoundingBoxCalculator.calculate(getLayoutTokens());
            }
            // here we bound all figure graphics in one single box (given that we
            // can have hundreds of graphics in a single figure); note: the bounding
            // box calculation with vector graphics might need some double check
            BoundingBox theGraphicsBox = null;
            if ((graphicObjects != null) && (graphicObjects.size() > 0)) {
                for (GraphicObject graphicObject : graphicObjects) {
                    if (theGraphicsBox == null) {
                        theGraphicsBox = graphicObject.getBoundingBox();
                    } else {
                        theGraphicsBox = theGraphicsBox.boundBoxExcludingAnotherPage(graphicObject.getBoundingBox());
                    }
                }
            }
            if (theGraphicsBox != null) {
                if (theBoxes == null)
                    theBoxes = new ArrayList<>();
                theBoxes.add(theGraphicsBox);
            }
            if (theBoxes != null && theBoxes.size() > 0) {
                String coords = Joiner.on(";").join(theBoxes);
                XmlBuilderUtils.addCoords(figureElement, coords);
            }
        }

        if (header != null) {
            Element head = XmlBuilderUtils.teiElement("head",
                    LayoutTokensUtil.normalizeText(header.toString()));
            figureElement.appendChild(head);
        }
        if (label != null) {
            Element labelEl = XmlBuilderUtils.teiElement("label",
                    LayoutTokensUtil.normalizeText(label.toString()));
            figureElement.appendChild(labelEl);
        }
        if (caption != null) {
            Element desc = XmlBuilderUtils.teiElement("figDesc");
            if (config.isGenerateTeiIds()) {
                String divID = KeyGen.getKey().substring(0, 7);
                addXmlId(desc, "_" + divID);
            }

            // if the segment has been parsed with the full text model we further
            // extract the clusters to get the bibliographical references
            if ((labeledCaption != null) && (labeledCaption.length() > 0)) {
                TaggingTokenClusteror clusteror =
                        new TaggingTokenClusteror(GrobidModels.FULLTEXT, labeledCaption, captionLayoutTokens);
                List<TaggingTokenCluster> clusters = clusteror.cluster();
                MarkerType citationMarkerType = null;
                if (markerTypes != null && markerTypes.size() > 0) {
                    citationMarkerType = markerTypes.get(0);
                }
                for (TaggingTokenCluster cluster : clusters) {
                    if (cluster == null) {
                        continue;
                    }
                    TaggingLabel clusterLabel = cluster.getTaggingLabel();
                    String clusterContent = LayoutTokensUtil.normalizeDehyphenizeText(cluster.concatTokens());
                    if (clusterLabel.equals(TaggingLabels.CITATION_MARKER)) {
                        try {
                            List<Node> refNodes = formatter.markReferencesTEILuceneBased(
                                    cluster.concatTokens(),
                                    doc.getReferenceMarkerMatcher(),
                                    config.isGenerateTeiCoordinates("ref"),
                                    false, citationMarkerType);
                            if (refNodes != null) {
                                for (Node n : refNodes) {
                                    desc.appendChild(n);
                                }
                            }
                        } catch (Exception e) {
                            LOGGER.warn("Problem when serializing TEI fragment for figure caption", e);
                        }
                    } else {
                        desc.appendChild(textNode(clusterContent));
                    }
                }
            } else {
                desc.appendChild(LayoutTokensUtil.normalizeText(caption.toString()).trim());
            }

            // desc is always non-null here (redundant null check removed)
            if (config.isWithSentenceSegmentation()) {
                formatter.segmentIntoSentences(desc, this.captionLayoutTokens, config, doc.getLanguage());
                // we need a sentence segmentation of the figure caption, for that
                // we need to introduce a <div>, then a <p>
                desc.setLocalName("p");
                Element div = XmlBuilderUtils.teiElement("div");
                div.appendChild(desc);
                Element figDesc = XmlBuilderUtils.teiElement("figDesc");
                figDesc.appendChild(div);
                desc = figDesc;
            }
            figureElement.appendChild(desc);
        }
        if ((graphicObjects != null) && (graphicObjects.size() > 0)) {
            for (GraphicObject graphicObject : graphicObjects) {
                Element go = XmlBuilderUtils.teiElement("graphic");
                // local renamed from "uri" to avoid shadowing the uri field
                String graphicUri = graphicObject.getURI();
                if (graphicUri != null) {
                    go.addAttribute(new Attribute("url", graphicUri));
                }
                if (graphicObject.getBoundingBox() != null) {
                    go.addAttribute(new Attribute("coords", graphicObject.getBoundingBox().toString()));
                }
                go.addAttribute(new Attribute("type", graphicObject.getType().name().toLowerCase()));
                if (graphicObject.isMask()) {
                    go.addAttribute(new Attribute("mask", "true"));
                }
                figureElement.appendChild(go);
            }
        }
        return figureElement.toXML();
    }

    // NOTE(review): as written the second replace swaps a space for a space (no-op);
    // presumably it was meant to collapse double spaces — confirm against intent
    private String cleanString(String input) {
        return input.replace("\n", " ").replace(" ", " ").trim();
    }

    public int getPage() {
        return page;
    }

    public double getHeight() {
        return height;
    }

    public double getWidth() {
        return width;
    }

    public double getX() {
        return x;
    }

    public double getY() {
        return y;
    }

    public URI getUri() {
        return uri;
    }

    public void setPage(int page) {
        this.page = page;
    }

    public void setY(double y) {
        this.y = y;
    }

    public void setX(double x) {
        this.x = x;
    }

    public void setWidth(double width) {
        this.width = width;
    }

    public void setHeight(double height) {
        this.height = height;
    }

    public List<BoundingBox> getTextArea() {
        return textArea;
    }

    public void setTextArea(List<BoundingBox> textArea) {
        this.textArea = textArea;
    }

    public List<LayoutToken> getLayoutTokens() {
        return layoutTokens;
    }

    public void setLayoutTokens(List<LayoutToken> layoutTokens) {
        this.layoutTokens = layoutTokens;
    }

    public void addLayoutTokens(List<LayoutToken> layoutTokens) {
        if (this.layoutTokens == null)
            this.layoutTokens = new ArrayList<>();
        this.layoutTokens.addAll(layoutTokens);
    }

    public void setBlockPtrs(SortedSet<Integer> blockPtrs) {
        this.blockPtrs = blockPtrs;
    }

    public SortedSet<Integer> getBlockPtrs() {
        return blockPtrs;
    }

    public void setCaption(StringBuilder caption) {
        this.caption = caption;
    }

    public void setHeader(StringBuilder header) {
        this.header = header;
    }

    public void setContent(StringBuilder content) {
        this.content = content;
    }

    public void setLabel(StringBuilder label) {
        this.label = label;
    }

    public void setUri(URI uri) {
        this.uri = uri;
    }
}
19,038
32.284965
145
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/BiblioItem.java
package org.grobid.core.data; import org.apache.commons.lang3.StringUtils; import org.apache.commons.collections4.CollectionUtils; import org.grobid.core.data.util.AuthorEmailAssigner; import org.grobid.core.data.util.ClassicAuthorEmailAssigner; import org.grobid.core.data.util.EmailSanitizer; import org.grobid.core.document.*; import org.grobid.core.engines.config.GrobidAnalysisConfig; import org.grobid.core.exceptions.GrobidException; import org.grobid.core.lang.Language; import org.grobid.core.layout.BoundingBox; import org.grobid.core.layout.LayoutToken; import org.grobid.core.tokenization.TaggingTokenCluster; import org.grobid.core.tokenization.TaggingTokenClusteror; import org.grobid.core.engines.label.TaggingLabel; import org.grobid.core.lexicon.Lexicon; import org.grobid.core.utilities.LanguageUtilities; import org.grobid.core.utilities.TextUtilities; import org.grobid.core.utilities.KeyGen; import org.grobid.core.utilities.LayoutTokensUtil; import org.grobid.core.GrobidModels; import java.net.URLEncoder; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Class for representing and exchanging a bibliographical item. * */ public class BiblioItem { protected static final Logger LOGGER = LoggerFactory.getLogger(BiblioItem.class); private LanguageUtilities languageUtilities = LanguageUtilities.getInstance(); private AuthorEmailAssigner authorEmailAssigner = new ClassicAuthorEmailAssigner(); private EmailSanitizer emailSanitizer = new EmailSanitizer(); private String teiId; //TODO: keep in sync with teiId - now teiId is generated in many different places private Integer ordinal; private List<BoundingBox> coordinates = null; // map of labels (e.g. <title> or <abstract>) to LayoutToken private Map<String, List<LayoutToken>> labeledTokens; /** * The following are internal working structures not meant to be used outside. 
* For collecting layout tokens of the various bibliographical component, * please refers to @See(getLayoutTokens(TaggingLabels label) */ private List<LayoutToken> authorsTokensWorkingCopy = new ArrayList<>(); private List<LayoutToken> abstractTokensWorkingCopy = new ArrayList<>(); @Override public String toString() { return "BiblioItem{" + "submission_date='" + submission_date + '\'' + ", download_date='" + download_date + '\'' + ", server_date='" + server_date + '\'' + ", languageUtilities=" + languageUtilities + ", item=" + item + ", parentItem=" + parentItem + ", ISBN13='" + ISBN13 + '\'' + ", ISBN10='" + ISBN10 + '\'' + ", title='" + title + '\'' + ", publisher='" + publisher + '\'' + ", nbPages=" + nbPages + ", edition='" + edition + '\'' + ", language='" + language + '\'' + ", subtitle='" + subtitle + '\'' + ", publication_date='" + publication_date + '\'' + ", normalized_publication_date=" + normalized_publication_date + ", editors='" + editors + '\'' + ", publisher_website='" + publisher_website + '\'' + ", serie='" + serie + '\'' + ", ISSN='" + ISSN + '\'' + ", ISSNe='" + ISSNe + '\'' + ", volume='" + volume + '\'' + ", number='" + number + '\'' + ", month='" + month + '\'' + ", support_type='" + support_type + '\'' + ", version='" + version + '\'' + ", smallImageURL='" + smallImageURL + '\'' + ", largeImageURL='" + largeImageURL + '\'' + ", publisherPlace='" + publisherPlace + '\'' + ", review='" + review + '\'' + ", keywords=" + keywords + ", subjects=" + subjects + ", categories='" + categories + '\'' + ", type='" + type + '\'' + ", typeDescription='" + typeDescription + '\'' + ", book_type='" + book_type + '\'' + ", DOI='" + doi + '\'' + ", arXivId='" + arXivId + '\'' + ", PMID='" + PMID + '\'' + ", PMCID='" + PMCID + '\'' + ", PII='" + PII + '\'' + ", ark='" + ark + '\'' + ", istexId='" + istexId + '\'' + ", inDOI='" + inDOI + '\'' + ", abstract_='" + abstract_ + '\'' + ", authors='" + authors + '\'' + ", firstAuthorSurname='" + firstAuthorSurname + 
'\'' + ", location='" + location + '\'' + ", bookTitle='" + bookTitle + '\'' + ", serieTitle='" + serieTitle + '\'' + ", pageRange='" + pageRange + '\'' + ", journal='" + journal + '\'' + ", volumeBlock='" + volumeBlock + '\'' + ", institution='" + institution + '\'' + ", note='" + note + '\'' + ", affiliation='" + affiliation + '\'' + ", address='" + address + '\'' + ", country='" + country + '\'' + ", town='" + town + '\'' + ", email='" + email + '\'' + ", pubnum='" + pubnum + '\'' + ", keyword='" + keyword + '\'' + ", phone='" + phone + '\'' + ", degree='" + degree + '\'' + ", web='" + web + '\'' + ", issue='" + issue + '\'' + ", journal_abbrev='" + journal_abbrev + '\'' + ", event='" + event + '\'' + ", abstractHeader='" + abstractHeader + '\'' + ", day='" + day + '\'' + ", locationPublisher='" + locationPublisher + '\'' + ", dedication='" + dedication + '\'' + ", submission='" + submission + '\'' + ", english_title='" + english_title + '\'' + ", url='" + url + '\'' + ", oaUrl='" + oaUrl + '\'' + ", uri='" + uri + '\'' + ", confidence='" + confidence + '\'' + ", conf=" + conf + ", e_year='" + e_year + '\'' + ", e_month='" + e_month + '\'' + ", e_day='" + e_day + '\'' + ", s_year='" + s_year + '\'' + ", s_month='" + s_month + '\'' + ", s_day='" + s_day + '\'' + ", d_year='" + d_year + '\'' + ", d_month='" + d_month + '\'' + ", d_day='" + d_day + '\'' + ", a_year='" + a_year + '\'' + ", a_month='" + a_month + '\'' + ", a_day='" + a_day + '\'' + ", authorList=" + authorList + ", editorList=" + editorList + ", affiliationList=" + affiliationList + ", addressList=" + addressList + ", emailList=" + emailList + ", webList=" + webList + ", phoneList=" + phoneList + ", markers=" + markers + ", fullAuthors=" + fullAuthors + ", fullEditors=" + fullEditors + ", fullAffiliations=" + fullAffiliations + ", reference='" + reference + '\'' + ", copyright='" + copyright + '\'' + ", funding='" + funding + '\'' + ", affiliationAddressBlock='" + affiliationAddressBlock + '\'' + ", 
articleTitle='" + articleTitle + '\'' + ", beginPage=" + beginPage + ", endPage=" + endPage + ", year='" + year + '\'' + ", authorString='" + authorString + '\'' + ", path='" + path + '\'' + ", collaboration='" + collaboration + '\'' + ", postProcessEditors=" + postProcessEditors + ", crossrefError=" + crossrefError + ", normalized_submission_date=" + normalized_submission_date + ", normalized_download_date=" + normalized_download_date + ", originalAffiliation='" + originalAffiliation + '\'' + ", originalAbstract='" + originalAbstract + '\'' + ", originalTitle='" + originalTitle + '\'' + ", originalAuthors='" + originalAuthors + '\'' + ", originalEditors='" + originalEditors + '\'' + ", originalAddress='" + originalAddress + '\'' + ", originalNote='" + originalNote + '\'' + ", originalKeyword='" + originalKeyword + '\'' + ", originalVolumeBlock='" + originalVolumeBlock + '\'' + ", originalJournal='" + originalJournal + '\'' + ", workingGroup='" + workingGroup + '\'' + ", documentType='" + documentType + '\'' + '}'; } public int item = -1; public static final int Book = 0; // the whole book public static final int Periodical = 1; // the journal or magazine item public static final int Digital_support = 2; public static final int Article = 3; // of a journal or magazine public static final int Unknown = 4; public static final int InBook = 5; public static final int InProceedings = 6; public static final int InCollection = 7; public static final int Manual = 8; public static final int TechReport = 9; public static final int MasterThesis = 10; public static final int PhdThesis = 11; public static final int Unpublished = 12; public static final int Proceedings = 13; public static final int Serie = 14; private BiblioItem parentItem = null; // the bibliographic item "container", i.e. // the book for a chapter // the journal for a journal article, etc. 
private String ISBN13 = null; private String ISBN10 = null; private String title = null; private String publisher = null; private int nbPages = -1; private String edition = null; private String language = null; private String subtitle = null; private String publication_date = null; private Date normalized_publication_date = null; private String editors = null; private String publisher_website = null; private String serie = null; private String ISSN = null; // print/default private String ISSNe = null; // electronic private String volume = null; private String number = null; private String month = null; private String support_type = null; private String version = null; private String smallImageURL = null; private String largeImageURL = null; private String publisherPlace = null; private String review = null; private List<Keyword> keywords; private List<String> subjects; private List<String> categories; private String type = null; // book, journal, proceedings, in book, etc private String typeDescription = null; private String book_type = null; private String doi = null; private String inDOI = null; private String arXivId = null; private String PMID = null; private String PMCID = null; private String PII = null; private String ark = null; private String istexId = null; private String abstract_ = null; private String collaboration = null; private String documentType = null; // for convenience GROBIDesque private String authors = null; //private List<LayoutToken> authorsTokens = new ArrayList<>(); private String firstAuthorSurname = null; private String location = null; private String bookTitle = null; private String serieTitle = null; private String pageRange = null; private String journal = null; private String volumeBlock = null; private String institution = null; private String note = null; private String affiliation = null; private String address = null; private String country = null; private String town = null; private String email = null; private String pubnum = 
null; private String keyword = null; private String phone = null; private String degree = null; private String web = null; private String issue = null; private String journal_abbrev = null; private String event = null; private String abstractHeader = null; private String day = null; private String locationPublisher = null; private String dedication = null; private String submission = null; private String english_title = null; private String url = null; private String oaUrl = null; private String uri = null; private String confidence = null; private double conf = 0.0; // abstract labeled featured sequence (to produce a structured abstract with, in particular, reference callout) private String labeledAbstract = null; // date for electronic publishing private String e_year = null; private String e_month = null; private String e_day = null; // date of submission private String s_year = null; private String s_month = null; private String s_day = null; // date of acceptance private String a_year = null; private String a_month = null; private String a_day = null; // date of download private String d_year = null; private String d_month = null; private String d_day = null; // advanced grobid recognitions private List<String> authorList; private List<String> editorList; private List<String> affiliationList; private List<String> addressList; private List<String> emailList; private List<String> webList; private List<String> phoneList; private List<String> markers; private List<Person> fullAuthors = null; private List<Person> fullEditors = null; private List<Affiliation> fullAffiliations = null; private String reference = null; private String copyright = null; private String funding = null; //public List<String> affiliationAddressBlock = null; public String affiliationAddressBlock = null; // just for articles private String articleTitle = null; private int beginPage = -1; private int endPage = -1; private String year = null; // default is publication date on print media private 
String authorString = null; private String path = ""; private boolean postProcessEditors = false; private boolean crossrefError = true; private String submission_date = null; private Date normalized_submission_date = null; private String download_date = null; private Date normalized_download_date = null; private String server_date = null; private Date normalized_server_date = null; // for OCR post-corrections private String originalAffiliation = null; private String originalAbstract = null; private String originalTitle = null; private String originalAuthors = null; private String originalEditors = null; private String originalAddress = null; private String originalNote = null; private String originalKeyword = null; private String originalVolumeBlock = null; private String originalJournal = null; private String workingGroup = null; private String rawMeeting = null; // Availability statement private String availabilityStmt = null; public static final List<String> confPrefixes = Arrays.asList("Proceedings of", "proceedings of", "In Proceedings of the", "In: Proceeding of", "In Proceedings, ", "In Proceedings of", "In Proceeding of", "in Proceeding of", "in Proceeding", "In Proceeding", "Proceedings", "proceedings", "In Proc", "in Proc", "In Proc.", "in Proc.", "In proc.", "in proc", "in proc.", "In proc", "Proc", "proc", "Proc.", "proc.", "Acte de la", "Acte de", "Acte", "acte de la", "acte de", "acte"); public BiblioItem() { } public void setParentItem(BiblioItem bi) { parentItem = bi; } public BiblioItem getParentItem() { return parentItem; } public int getItem() { return item; } public void setItem(int type) { item = type; } public String getISBN13() { return this.ISBN13; } public String getISBN10() { return this.ISBN10; } public String getTitle() { return this.title; } public String getPublisher() { return this.publisher; } public String getEdition() { return this.edition; } public String getLanguage() { return this.language; } public String getSubtitle() { if 
(subtitle != null) if (subtitle.length() != 0) if (!subtitle.equals("null")) return this.subtitle; return null; } public String getPublicationDate() { return this.publication_date; } public Date getNormalizedPublicationDate() { return normalized_publication_date; } public String getEditors() { return this.editors; } public String getPublisherWebsite() { return this.publisher_website; } public String getSerie() { return this.serie; } public String getISSN() { return this.ISSN; } public String getISSNe() { return this.ISSNe; } public String getVolume() { return this.volume; } public String getNumber() { return this.number; } public String getMonth() { return this.month; } public String getSupportType() { return this.support_type; } public String getVersion() { return this.version; } public String getSmallImageURL() { return this.smallImageURL; } public String getLargeImageURL() { return this.largeImageURL; } public String getPublisherPlace() { return publisherPlace; } public String getReview() { return this.review; } public List<String> getCategories() { return this.categories; } public int getNbPages() { return nbPages; } public String getType() { return type; } public String getTypeDescription() { return typeDescription; } public String getBookType() { return book_type; } public String getDOI() { return doi; } public String getArk() { return ark; } public String getIstexId() { return istexId; } public String getInDOI() { return inDOI; } public String getArXivId() { return arXivId; } public String getPMID() { return PMID; } public String getPMCID() { return PMCID; } public String getPII() { return PII; } public String getArticleTitle() { return articleTitle; } public int getBeginPage() { return beginPage; } public int getEndPage() { return endPage; } public String getYear() { return year; } public String getAbstract() { return abstract_; } public String getLabeledAbstract() { return labeledAbstract; } public String getEmail() { return email; } public String 
getPubnum() { return pubnum; } public String getCollaboration() { return collaboration; } public String getSerieTitle() { return serieTitle; } public String getURL() { return url; } public String getOAURL() { return oaUrl; } public String getURI() { return uri; } public String getConfidence() { return confidence; } // temp public String getAuthors() { return authors; } public String getLocation() { return location; } public String getBookTitle() { return bookTitle; } public String getPageRange() { if (pageRange != null) return pageRange; else if ((beginPage != -1) && (endPage != -1)) return "" + beginPage + "--" + endPage; else return null; } public String getJournal() { return journal; } public String getVolumeBlock() { return volumeBlock; } public String getInstitution() { return institution; } public String getNote() { return note; } public String getAffiliation() { return affiliation; } public String getAddress() { return address; } public String getCountry() { return country; } public String getTown() { return town; } public String getKeyword() { return keyword; } public List<Keyword> getKeywords() { return keywords; } public List<String> getSubjects() { return subjects; } public String getPhone() { return phone; } public String getDegree() { return degree; } public String getWeb() { return web; } public String getIssue() { return issue; } public String getJournalAbbrev() { return journal_abbrev; } public String getEvent() { return event; } public boolean getError() { return crossrefError; } public String getAbstractHeader() { return abstractHeader; } public String getDay() { return day; } public String getLocationPublisher() { return locationPublisher; } public String getAuthorString() { return authorString; } public String getE_Year() { return e_year; } public String getE_Month() { return e_month; } public String getE_Day() { return e_day; } public String getS_Year() { return s_year; } public String getS_Month() { return s_month; } public String getS_Day() { 
return s_day; } public String getA_Year() { return a_year; } public String getA_Month() { return a_month; } public String getA_Day() { return a_day; } public String getD_Year() { return d_year; } public String getD_Month() { return d_month; } public String getD_Day() { return d_day; } public String getDedication() { return dedication; } public String getSubmission() { return submission; } public String getEnglishTitle() { return english_title; } public String getSubmissionDate() { return submission_date; } public Date getNormalizedSubmissionDate() { return normalized_submission_date; } public String getDownloadDate() { return download_date; } public Date getNormalizedDownloadDate() { return normalized_download_date; } public String getServerDate() { return server_date; } public Date getNormalizedServerDate() { return normalized_server_date; } public String getOriginalAffiliation() { return originalAffiliation; } public String getOriginalAbstract() { return originalAbstract; } public String getOriginalAuthors() { return originalAuthors; } public String getOriginalEditors() { return originalEditors; } public String getOriginalTitle() { return originalTitle; } public String getOriginalAddress() { return originalAddress; } public String getOriginalNote() { return originalNote; } public String getOriginalKeyword() { return originalKeyword; } public String getOriginalVolumeBlock() { return originalVolumeBlock; } public String getOriginalJournal() { return originalJournal; } public List<org.grobid.core.data.Person> getFullAuthors() { return fullAuthors; } public List<org.grobid.core.data.Person> getFullEditors() { return fullEditors; } public List<org.grobid.core.data.Affiliation> getFullAffiliations() { return fullAffiliations; } public String getReference() { return reference; } public String getCopyright() { return copyright; } public String getFunding() { return funding; } public String getWorkingGroup() { return workingGroup; } public String getDocumentType() { 
return documentType;
}

// === Setters: identifiers and core bibliographical fields ===
// Most setters normalize whitespace via StringUtils.normalizeSpace.

public void setISBN13(String isbn) {
    /* some cleaning... */
    this.ISBN13 = StringUtils.normalizeSpace(cleanISBNString(isbn));
}

public void setISBN10(String isbn) {
    /* some cleaning... */
    this.ISBN10 = StringUtils.normalizeSpace(isbn);
}

public void setTitle(String theTitle) { this.title = StringUtils.normalizeSpace(theTitle); }
public void setPublisher(String thePublisher) { this.publisher = StringUtils.normalizeSpace(thePublisher); }

public void setEdition(String theEdition) {
    if (theEdition != null) {
        // NOTE(review): the guard tests length() > 10 but truncates to 9 characters
        // (substring(0, 9)) — looks like an off-by-one; behavior preserved as-is.
        if (theEdition.length() > 10) {
            theEdition = theEdition.substring(0, 9);
        }
    }
    this.edition = StringUtils.normalizeSpace(theEdition);
}

public void setLanguage(String theLanguage) { this.language = StringUtils.normalizeSpace(theLanguage); }
public void setSubtitle(String theSubtitle) { this.subtitle = StringUtils.normalizeSpace(theSubtitle); }
public void setPublicationDate(String theDate) { this.publication_date = StringUtils.normalizeSpace(theDate); }
public void setNormalizedPublicationDate(Date theDate) { this.normalized_publication_date = theDate; }

/** Merges the given date into the current normalized publication date. */
public void mergeNormalizedPublicationDate(Date theDate) {
    this.normalized_publication_date = Date.merge(this.normalized_publication_date, theDate);
}

public void setEditors(String theEditors) { this.editors = StringUtils.normalizeSpace(theEditors); }
public void setPublisherWebsite(String theWebsite) { this.publisher_website = StringUtils.normalizeSpace(theWebsite); }
public void setSerie(String theSerie) { this.serie = StringUtils.normalizeSpace(theSerie); }
public void setISSN(String theISSN) { this.ISSN = StringUtils.normalizeSpace(theISSN); }
public void setISSNe(String theISSN) { this.ISSNe = StringUtils.normalizeSpace(theISSN); }
public void setVolume(String theVolume) { this.volume = StringUtils.normalizeSpace(theVolume); }
public void setNumber(String theNumber) { this.number = StringUtils.normalizeSpace(theNumber); }
public void setMonth(String theMonth) { this.month = StringUtils.normalizeSpace(theMonth); }
public void setSupportType(String theType) { this.support_type = StringUtils.normalizeSpace(theType); }
public void setVersion(String theVersion) { this.version = StringUtils.normalizeSpace(theVersion); }
public void setSmallImageURL(String url) { this.smallImageURL = url; }
public void setLargeImageURL(String url) { this.largeImageURL = url; }
public void setPublisherPlace(String p) { this.publisherPlace = StringUtils.normalizeSpace(p); }
public void setCategories(List<String> cat) { this.categories = cat; }

public void addCategory(String cat) {
    if (categories == null) {
        categories = new ArrayList<String>();
    }
    categories.add(cat);
}

public void setNbPages(int nb) { this.nbPages = nb; }
public void setReview(String rev) { this.review = rev; }
public void setType(String t) { this.type = t; }
public void setTypeDescription(String t) { typeDescription = t; }
public void setBookType(String bt) { this.book_type = StringUtils.normalizeSpace(bt); }

public void setDOI(String id) {
    if (id == null)
        return;
    this.doi = cleanDOI(id);
}

public void setInDOI(String id) {
    if (id != null) {
        inDOI = StringUtils.normalizeSpace(id);
        inDOI = inDOI.replace(" ", "");
        inDOI = cleanDOI(inDOI);
    }
}

/**
 * Normalizes a raw DOI string: strips whitespace, resolver URL prefixes
 * ("doi.org/", "dx.doi.org/"), "doi:"/"doi" prefixes, page numbers wrongly
 * concatenated in front of the DOI, and combining diacritical marks.
 */
public static String cleanDOI(String doi) {
    if (doi == null) {
        return doi;
    }
    doi = StringUtils.normalizeSpace(doi);
    doi = doi.replace(" ", "");
    doi = doi.replaceAll("https?\\://(dx\\.)?doi\\.org/", "");
    //bibl = bibl.replace("//", "/");
    if (doi.toLowerCase().startsWith("doi:") || doi.toLowerCase().startsWith("doi/")) {
        doi = doi.substring(4);
    }
    if (doi.toLowerCase().startsWith("doi")) {
        doi = doi.substring(3);
    }
    // pretty common wrong extraction pattern:
    // 43-61.DOI:10.1093/jpepsy/14.1.436/7
    // 367-74.DOI:10.1080/14034940210165064
    // (pages concatenated to the DOI) - easy/safe to fix
    if (StringUtils.containsIgnoreCase(doi, "doi:10.")) {
        doi = doi.substring(StringUtils.indexOfIgnoreCase(doi, "doi:10.") + 4);
    }
    // for DOI coming from PDF links, we have some prefix cleaning to make
    if (doi.startsWith("file://") || doi.startsWith("https://") || doi.startsWith("http://")) {
        int ind = doi.indexOf("/10.");
        if (ind != -1)
            doi = doi.substring(ind + 1);
    }
    doi = doi.trim();
    // drop trailing URL garbage glued after the DOI
    int ind = doi.indexOf("http://");
    if (ind > 10) {
        doi = doi.substring(0, ind);
    }
    doi = doi.replaceAll("[\\p{M}]", "");
    doi = doi.replaceAll("\\p{InCombiningDiacriticalMarks}+", "");
    return doi;
}

public void setArXivId(String id) {
    if (id != null) {
        arXivId = StringUtils.normalizeSpace(id);
        arXivId = arXivId.replace(" ", "");
    }
}

public void setPMID(String id) {
    if (id != null) {
        PMID = StringUtils.normalizeSpace(id);
        PMID = PMID.replace(" ", "");
    }
}

public void setPMCID(String id) {
    if (id != null) {
        PMCID = StringUtils.normalizeSpace(id);
        PMCID = PMCID.replace(" ", "");
    }
}

public void setPII(String id) {
    if (id != null) {
        PII = StringUtils.normalizeSpace(id);
        PII = PII.replace(" ", "");
    }
}

public void setIstexId(String id) { istexId = id; }
public void setArk(String id) { ark = id; }
public void setArticleTitle(String ti) { articleTitle = StringUtils.normalizeSpace(ti); }
public void setBeginPage(int p) { beginPage = p; }
public void setEndPage(int p) { endPage = p; }
public void setYear(String y) { year = StringUtils.normalizeSpace(y); }
public void setAbstract(String a) { abstract_ = cleanAbstract(a); }
public void setLabeledAbstract(String labeledAbstract) { this.labeledAbstract = labeledAbstract; }
public void setLocationPublisher(String s) { locationPublisher = StringUtils.normalizeSpace(s); }
public void setSerieTitle(String s) { serieTitle = StringUtils.normalizeSpace(s); }
public void setAuthorString(String s) { authorString = s; }
public void setURL(String s) { url = StringUtils.normalizeSpace(s); }
public void setOAURL(String s) { oaUrl = s; }
public void setURI(String s) { uri = StringUtils.normalizeSpace(s); }
public void setConfidence(String s) { confidence = s; }
public void setConf(double b) { conf = b; }

public void setFullAuthors(List<org.grobid.core.data.Person> full) { fullAuthors = full; }
public void setFullEditors(List<org.grobid.core.data.Person> full) { fullEditors = full; }

public void setFullAffiliations(List<org.grobid.core.data.Affiliation> full) {
    fullAffiliations = full;
    // if no id is present in the affiliation objects, we add one
    int num = 0;
    if (fullAffiliations != null) {
        for (Affiliation affiliation : fullAffiliations) {
            if (affiliation.getKey() == null) {
                affiliation.setKey("aff" + num);
            }
            num++;
        }
    }
}

public void setWorkingGroup(String wg) { this.workingGroup = wg; }
public void setDocumentType(String doctype) { this.documentType = doctype; }

// temp
public void setAuthors(String aut) { authors = aut; }

public BiblioItem collectAuthorsToken(LayoutToken lt) {
    authorsTokensWorkingCopy.add(lt);
    return this;
}

public void collectAuthorsTokens(List<LayoutToken> layoutTokens) {
    this.authorsTokensWorkingCopy.addAll(layoutTokens);
}

public void collectAbstractTokens(List<LayoutToken> layoutTokens) {
    this.abstractTokensWorkingCopy.addAll(layoutTokens);
}

/** Appends an author string (separator " ; ") and records it once in authorList. */
public void addAuthor(String aut) {
    if (authors == null)
        authors = aut;
    else
        authors += " ; " + aut;
    if (authorList == null)
        authorList = new ArrayList<String>();
    if (!authorList.contains(aut))
        authorList.add(aut);
}

public void addFullAuthor(Person aut) {
    if (fullAuthors == null)
        fullAuthors = new ArrayList<Person>();
    if (!fullAuthors.contains(aut))
        fullAuthors.add(aut);
}

public void addFullEditor(Person aut) {
    if (fullEditors == null)
        fullEditors = new ArrayList<Person>();
    if (!fullEditors.contains(aut))
        fullEditors.add(aut);
}

/** Appends an editor string (separator " ; ") and records it once in editorList. */
public void addEditor(String aut) {
    if (editors == null)
        editors = aut;
    else
        editors += " ; " + aut;
    if (editorList == null)
        editorList = new ArrayList<String>();
    if (!editorList.contains(aut))
        editorList.add(aut);
}

public void setLocation(String loc) { location = StringUtils.normalizeSpace(loc); }
public void setBookTitle(String book) { bookTitle = StringUtils.normalizeSpace(book); }
public void setPageRange(String pages) { pageRange = StringUtils.normalizeSpace(pages); }
public void setJournal(String jour) { journal = StringUtils.normalizeSpace(jour); }

/**
 * Sets the raw volume block; when postProcess is true, splits it into
 * volume and issue via postProcessVolumeBlock().
 */
public void setVolumeBlock(String vol, boolean postProcess) {
    volumeBlock = StringUtils.normalizeSpace(vol);
    if (postProcess)
        volumeBlock = postProcessVolumeBlock();
}

public void setInstitution(String inst) { institution = StringUtils.normalizeSpace(inst); }
public void setNote(String not) { note = StringUtils.normalizeSpace(not); }
public void setAffiliation(String a) { affiliation = a; }
public void setAddress(String a) { address = a; }
public void setCountry(String a) { country = a; }
public void setTown(String a) { town = a; }
public void setEmail(String e) { email = e; }
public void setPubnum(String p) { pubnum = StringUtils.normalizeSpace(p); }
public void setKeyword(String k) { keyword = cleanKeywords(k); }

/** Adds one keyword after cleaning; discards it when it looks like a segmentation error. */
public void addKeyword(String k) {
    if (keywords == null)
        keywords = new ArrayList<Keyword>();
    String theKey = cleanKeywords(k);
    if (theKey.toLowerCase().contains("introduction")) {
        // if the keyword contains introduction, this is normally a segmentation error
        theKey = null;
    }
    if (theKey != null) {
        keywords.add(new Keyword(theKey));
    }
}

public void setKeywords(List<Keyword> k) { keywords = k; }

public void addSubject(String k) {
    if (subjects == null)
        subjects = new ArrayList<String>();
    subjects.add(k);
}

public void setSubjects(List<String> k) { subjects = k; }
public void setPhone(String p) { phone = p; }
public void setDegree(String d) { degree = StringUtils.normalizeSpace(d); }

/** Sets the web field and opportunistically extracts a DOI from it when none is set yet. */
public void setWeb(String w) {
    web = StringUtils.normalizeSpace(w);
    web = web.replace(" ", "");
    if (StringUtils.isEmpty(doi)) {
        Matcher doiMatcher = TextUtilities.DOIPattern.matcher(web);
        if (doiMatcher.find()) {
            setDOI(doiMatcher.group());
        }
    }
}

public void setCollaboration(String collab) { collaboration = StringUtils.normalizeSpace(collab); }
public void setIssue(String i) { issue = StringUtils.normalizeSpace(i); }
public void setJournalAbbrev(String j) { journal_abbrev = StringUtils.normalizeSpace(j); }
public void setEvent(String e) { event = StringUtils.normalizeSpace(e); }
public void setError(boolean e) { crossrefError = e; }
public void setAbstractHeader(String a) { abstractHeader = StringUtils.normalizeSpace(a); }
public void setPath(String p) { path = p; }
public void setDay(String d) { day = d; }

// electronic / acceptance / submission / download date components
public void setE_Year(String d) { e_year = d; }
public void setE_Month(String d) { e_month = d; }
public void setE_Day(String d) { e_day = d; }
public void setA_Year(String d) { a_year = d; }
public void setA_Month(String d) { a_month = d; }
public void setA_Day(String d) { a_day = d; }
public void setS_Year(String d) { s_year = d; }
public void setS_Month(String d) { s_month = d; }
public void setS_Day(String d) { s_day = d; }
public void setD_Year(String d) { d_year = d; }
public void setD_Month(String d) { d_month = d; }
public void setD_Day(String d) { d_day = d; }

public void setDedication(String d) { dedication = StringUtils.normalizeSpace(d); }
public void setSubmission(String s) { submission = StringUtils.normalizeSpace(s); }
public void setEnglishTitle(String d) { english_title = StringUtils.normalizeSpace(d); }
public void setSubmissionDate(String d) { submission_date = StringUtils.normalizeSpace(d); }
public void setNormalizedSubmissionDate(Date d) { normalized_submission_date = d; }
public void setDownloadDate(String d) { download_date = StringUtils.normalizeSpace(d); }
public void setNormalizedDownloadDate(Date d) { normalized_download_date = d; }
public void setServerDate(String d) { server_date = StringUtils.normalizeSpace(d); }
public void setNormalizedServerDate(Date d) { normalized_server_date = d; }

// raw strings kept for OCR post-correction
public void setOriginalAffiliation(String original) { originalAffiliation = original; }
public void setOriginalAbstract(String original) { originalAbstract = original; }
public void setOriginalAuthors(String original) { originalAuthors = original; }
public void setOriginalEditors(String original) { originalEditors = original; }
public void setOriginalTitle(String original) { originalTitle = original; }
public void setOriginalAddress(String original) { originalAddress = original; }
public void setOriginalNote(String original) { originalNote = original; }
public void setOriginalKeyword(String original) { originalKeyword = original; }
public void setOriginalVolumeBlock(String original) { originalVolumeBlock = original; }
public void setOriginalJournal(String original) { originalJournal = original; }

public void setReference(String ref) { reference = ref; }
public void setCopyright(String cop) { copyright = StringUtils.normalizeSpace(cop); }
public void setFunding(String gra) { funding = StringUtils.normalizeSpace(gra); }

public String getMeeting() { return rawMeeting; }
public void setMeeting(String meet) { this.rawMeeting = meet; }

/**
 * General string cleaning for SQL strings. This method might depend on the chosen
 * relational database.
 *
 * Escapes ', % and _ with a backslash. Returns null for null or empty input.
 */
public static String cleanSQLString(String str) {
    if (str == null)
        return null;
    if (str.length() == 0)
        return null;
    // use a StringBuilder instead of repeated String concatenation in the loop;
    // the former write-only "special" flag was dead code and has been removed
    StringBuilder cleanedString = new StringBuilder(str.length());
    for (int index = 0; index < str.length(); index++) {
        char currentCharacter = str.charAt(index);
        if ((currentCharacter == '\'') || (currentCharacter == '%') || (currentCharacter == '_')) {
            cleanedString.append('\\');
        }
        cleanedString.append(currentCharacter);
    }
    return cleanedString.toString();
}

/**
 * Special string cleaning of ISBN and ISSN numbers.
*/ public static String cleanISBNString(String str) { String cleanedString = ""; for (int index = 0; (index < str.length()); index++) { char currentCharacter = str.charAt(index); if ((currentCharacter != '-') && (currentCharacter != ' ') && (currentCharacter != '\'')) cleanedString += currentCharacter; } return StringUtils.normalizeSpace(cleanedString); } /** * Reinit all the values of the current bibliographical item */ public void reset() { ISBN13 = null; ISBN10 = null; title = null; publisher = null; edition = null; language = null; subtitle = null; publication_date = null; normalized_publication_date = null; editors = null; publisher_website = null; serie = null; ISSN = null; ISSNe = null; volume = null; number = null; month = null; support_type = null; version = null; smallImageURL = null; largeImageURL = null; publisherPlace = null; review = null; categories = null; nbPages = -1; type = null; book_type = null; doi = null; istexId = null; ark = null; inDOI = null; arXivId = null; PMID = null; PMCID = null; PII = null; abstract_ = null; url = null; oaUrl = null; uri = null; authors = null; location = null; bookTitle = null; pageRange = null; journal = null; volumeBlock = null; institution = null; note = null; affiliation = null; address = null; email = null; pubnum = null; keyword = null; phone = null; degree = null; web = null; issue = null; journal_abbrev = null; event = null; day = null; submission_date = null; normalized_submission_date = null; download_date = null; normalized_download_date = null; server_date = null; normalized_server_date = null; beginPage = -1; endPage = -1; articleTitle = null; dedication = null; submission = null; english_title = null; fullAuthors = null; fullAffiliations = null; reference = null; copyright = null; funding = null; workingGroup = null; documentType = null; } /** * Post process the volume block in order to distinguish when * possible and when appropriate volume and issue */ public String postProcessVolumeBlock() { if 
(volumeBlock == null) { return null; } if (volumeBlock.length() == 0) { return volumeBlock; } volumeBlock = StringUtils.normalizeSpace(volumeBlock); // the volume is always the first full number sequence of the block // first we remove the possible non digit character prefix boolean stop = false; int p = 0; while (!stop && (p < volumeBlock.length())) { if (Character.isDigit(volumeBlock.charAt(p))) stop = true; else p++; } if (!stop) return volumeBlock; // we just have letters... we can't do anything int i = p; stop = false; while (!stop && (i < volumeBlock.length())) { if (!Character.isDigit(volumeBlock.charAt(i))) stop = true; else i++; } String resVolume = null; if (stop) resVolume = volumeBlock.substring(p, i); else return volumeBlock.substring(p); // we then have at least one non numerical character stop = false; while (!stop && (i < volumeBlock.length())) { if (Character.isDigit(volumeBlock.charAt(i))) stop = true; else i++; } if (!stop) return resVolume; // if present, the second number sequence is the issue stop = false; int j = i + 1; while (!stop && (j < volumeBlock.length())) { if (!Character.isDigit(volumeBlock.charAt(j))) stop = true; else j++; } if (!stop) j = volumeBlock.length(); issue = volumeBlock.substring(i, j); return resVolume; } /** * Some little cleaning of the abstract field. 
*
 * To be done: use a short text model to structure abstract
 */
public static final String[] ABSTRACT_PREFIXES = {"abstract", "summary", "résumé", "abrégé", "a b s t r a c t"};

/**
 * Strips a leading "abstract"/"summary"-style prefix and leading punctuation
 * from the abstract text; records the matched prefix in abstractHeader.
 */
public String cleanAbstract(String string) {
    if (string == null)
        return null;
    if (string.length() == 0)
        return string;
    String res = StringUtils.normalizeSpace(string);
    String res0 = res.toLowerCase();
    for (String abstractPrefix : ABSTRACT_PREFIXES) {
        if (res0.startsWith(abstractPrefix)) {
            if (abstractPrefix.length() < res.length()) {
                res = res.substring(abstractPrefix.length(), res.length());
                // fix: trim() returns a new String; its result was previously discarded
                res = res.trim();
            } else {
                res = "";
            }
            abstractHeader = abstractPrefix;
            break;
        }
    }
    if ((res.startsWith(".")) || (res.startsWith(":")) || (res.startsWith(")"))) {
        res = res.substring(1, res.length());
        res = res.trim();
    }
    //res = res.replace("@BULLET", " • ");
    res = res.replace("( ", "(");
    res = res.replace(" )", ")");
    // NOTE(review): this replace looks like a no-op (single space -> single space);
    // it was probably intended to collapse double spaces — confirm against history
    res = res.replace(" ", " ");
    return res;
}

/**
 * Drops leading delimiter tokens and abstract-prefix tokens from a token list,
 * returning the remaining sublist (a view over the input list).
 */
public static List<LayoutToken> cleanAbstractLayoutTokens(List<LayoutToken> tokens) {
    if (tokens == null)
        return null;
    if (tokens.size() == 0)
        return tokens;
    int n = 0;
    while (n < tokens.size()) {
        String tokenString = StringUtils.normalizeSpace(tokens.get(n).getText().toLowerCase());
        if (tokenString.length() == 0 || TextUtilities.delimiters.contains(tokenString)) {
            n++;
            continue;
        }
        boolean matchPrefix = false;
        for (String abstractPrefix : ABSTRACT_PREFIXES) {
            if (tokenString.equals(abstractPrefix)) {
                matchPrefix = true;
                break;
            }
        }
        if (matchPrefix) {
            n++;
            continue;
        }
        break;
    }
    return tokens.subList(n, tokens.size());
}

/** Cleans the title and book title fields of the given item in place. */
public static void cleanTitles(BiblioItem bibl) {
    if (bibl.getTitle() != null) {
        String localTitle = TextUtilities.cleanField(bibl.getTitle(), false);
        if (localTitle != null && localTitle.endsWith(" y")) {
            // some markers at the end of the title are extracted from the pdf as " y" at the end of the title
            // e.g.
<title level="a" type="main">Computations in finite-dimensional Lie algebras y</title> localTitle = localTitle.substring(0, localTitle.length() - 2); } bibl.setTitle(localTitle); } if (bibl.getBookTitle() != null) { bibl.setBookTitle(TextUtilities.cleanField(bibl.getBookTitle(), false)); } } /** * Some little cleaning of the keyword field (likely unnecessary with latest header model). */ public static String cleanKeywords(String string) { if (string == null) return null; if (string.length() == 0) return string; String res = StringUtils.normalizeSpace(string); String resLow = res.toLowerCase(); if (resLow.startsWith("keywords")) { res = res.substring(8); } else if (resLow.startsWith("key words") || resLow.startsWith("mots clés") || resLow.startsWith("mots cles")) { res = res.substring(9); } else if (resLow.startsWith("mots clefs")) { res = res.substring(10); } res = res.trim(); if (res.startsWith(":") || res.startsWith("—") || res.startsWith("-")) { res = res.substring(1); } if (res.endsWith(".")) { res = res.substring(0, res.length() - 1); } return res.trim(); } /** * Keyword field segmentation. 
* * TBD: create a dedicated model to analyse the keyword field, segmenting them properly and * identifying the possible schemes */ public static List<Keyword> segmentKeywords(String string) { if (string == null) return null; if (string.length() == 0) return null; String type = null; if (string.startsWith("Categories and Subject Descriptors")) { type = "subject-headers"; string = string.replace("Categories and Subject Descriptors", "").trim(); } else if (string.startsWith("PACS Numbers") || string.startsWith("PACS") ) { type = "pacs"; string = string.replace("PACS Numbers", "").replace("PACS", "").trim(); if (string.startsWith(":")) { string = string.substring(1); } } else { type = "author"; } List<Keyword> result = new ArrayList<Keyword>(); // the list of possible keyword separators List<String> separators = Arrays.asList(";","■", "•", "ㆍ", "Á", "\n", ",", ".", ":", "/", "|"); List<String> separatorsSecondary = Arrays.asList("•", "■"); for(String separator : separators) { StringTokenizer st = new StringTokenizer(string, separator); if (st.countTokens() > 2) { while (st.hasMoreTokens()) { String res = st.nextToken().trim(); if (res.startsWith(":")) { res = res.substring(1); } boolean noSecondary = true; res = res.replace("\n", " ").replaceAll("( )+", " "); for(String separatorSecondary : separatorsSecondary) { StringTokenizer st2 = new StringTokenizer(res, separatorSecondary); if (st2.countTokens() > 1) { while (st2.hasMoreTokens()) { String res2 = st2.nextToken().trim(); res2 = res2.replace("\n", " ").replaceAll("( )+", " "); Keyword keyw = new Keyword(res2, type); result.add(keyw); } noSecondary = false; } } if (noSecondary) { Keyword keyw = new Keyword(res, type); result.add(keyw); } } break; } } return result; } /** * Export to BibTeX format. Use "id" as BibTeX key. */ public String toBibTeX() { return toBibTeX("id"); } /** * Export to BibTeX format * * @param id the BibTeX key to use. 
*/ public String toBibTeX(String id) { return toBibTeX(id, new GrobidAnalysisConfig.GrobidAnalysisConfigBuilder().includeRawCitations(false).build()); } /** * Export to BibTeX format * * @param id the BibTeX key to use */ public String toBibTeX(String id, GrobidAnalysisConfig config) { String type; if (journal != null) { type = "article"; } else if (book_type != null) { type = "techreport"; } else if (bookTitle != null) { if (StringUtils.containsIgnoreCase(bookTitle, "proceedings") || (bookTitle.startsWith("proc")) || (bookTitle.startsWith("Proc")) || (bookTitle.startsWith("In Proc")) || (bookTitle.startsWith("In proc"))) { type = "inproceedings"; } else { LOGGER.debug("No journal given, but a booktitle. However, the booktitle does not start with \"proc\" or similar strings. Returning inbook"); type = "inbook"; } } else { // using "misc" as fallback type type = "misc"; } StringJoiner bibtex = new StringJoiner(",\n", "@" + type + "{" + id + ",\n", "\n}\n"); try { // author // fullAuthors has to be used instead if (collaboration != null) { bibtex.add(" author = {" + collaboration + "}"); } else { StringJoiner authors = new StringJoiner(" and ", " author = {", "}"); if (fullAuthors != null) { fullAuthors.stream() .filter(person -> person != null) .forEachOrdered(person -> { String author = ""; if (person.getLastName() != null) { author = person.getLastName(); } if (person.getFirstName() != null) { if (author.length() > 0) { author += ", "; } author += person.getFirstName(); } if (author.length() > 0 ) { authors.add(author); } }); } else if (this.authors != null) { StringTokenizer st = new StringTokenizer(this.authors, ";"); while (st.hasMoreTokens()) { String author = st.nextToken(); if (author != null) { authors.add(author.trim()); } } } bibtex.add(authors.toString()); } // title if (title != null) { bibtex.add(" title = {" + title + "}"); } // journal if (journal != null) { bibtex.add(" journal = {" + journal + "}"); } // booktitle if ((journal == null) && 
(book_type == null) && (bookTitle != null)) { bibtex.add(" booktitle = {" + bookTitle + "}"); } // booktitle if ((journal == null) && (serieTitle != null)) { bibtex.add(" series = {" + serieTitle + "}"); } // publisher if (publisher != null) { bibtex.add(" publisher = {" + publisher + "}"); } // editors if (editors != null) { String locEditors = editors.replace(" ; ", " and "); bibtex.add(" editor = {" + locEditors + "}"); } // fullEditors has to be used instead // dates if (normalized_publication_date != null) { String isoDate = Date.toISOString(normalized_publication_date); if (isoDate != null) { bibtex.add(" date = {" + isoDate + "}"); } if (normalized_publication_date.getYear() >= 0) { bibtex.add(" year = {" + normalized_publication_date.getYear() + "}"); if (normalized_publication_date.getMonth() >= 0) { bibtex.add(" month = {" + normalized_publication_date.getMonth() + "}"); if (normalized_publication_date.getDay() >= 0) { bibtex.add(" day = {" + normalized_publication_date.getDay() + "}"); } } } } else if (publication_date != null) { bibtex.add(" year = {" + publication_date + "}"); } // address if (location != null) { bibtex.add(" address = {" + location + "}"); } // pages if (pageRange != null) { bibtex.add(" pages = {" + pageRange + "}"); } // volume if (volumeBlock != null) { bibtex.add(" volume = {" + volumeBlock + "}"); } // issue (named number in BibTeX) if (issue != null) { bibtex.add(" number = {" + issue + "}"); } // DOI if (!StringUtils.isEmpty(doi)) { bibtex.add(" doi = {" + doi + "}"); } // arXiv identifier if (!StringUtils.isEmpty(arXivId)) { bibtex.add(" eprint = {" + arXivId + "}"); } /* note that the following is now recommended for arXiv citations: archivePrefix = "arXiv", eprint = "0707.3168", primaryClass = "hep-th", (here old identifier :( )) see https://arxiv.org/hypertex/bibstyles/ */ // abstract if (!StringUtils.isEmpty(abstract_)) { bibtex.add(" abstract = {" + abstract_ + "}"); } // keywords if (keywords != null) { String value = 
keywords.stream() .map(keyword -> keyword.getKeyword()) .filter(keyword -> !StringUtils.isBlank(keyword)) .collect(Collectors.joining(", ", "keywords = {", "}")); bibtex.add(value); } if (config.getIncludeRawCitations() && !StringUtils.isEmpty(reference) ) { // escape all " signs bibtex.add(" raw = {" + reference + "}"); } } catch (Exception e) { LOGGER.error("Cannot export BibTex format, because of nested exception.", e); throw new GrobidException("Cannot export BibTex format, because of nested exception.", e); } return bibtex.toString(); } /** * Check if the identifier pubnum is a DOI or an arXiv identifier. If yes, instanciate * the corresponding field and reset the generic pubnum field. */ public void checkIdentifier() { // DOI if (!StringUtils.isEmpty(pubnum) && StringUtils.isEmpty(doi)) { Matcher doiMatcher = TextUtilities.DOIPattern.matcher(pubnum); if (doiMatcher.find()) { setDOI(pubnum); setPubnum(null); } else { doiMatcher = TextUtilities.DOIPattern.matcher(pubnum.replace(" ", "")); if (doiMatcher.find()) { setDOI(pubnum); setPubnum(null); } } } // arXiv id (this covers old and new versions) if (!StringUtils.isEmpty(pubnum) && StringUtils.isEmpty(arXivId)) { Matcher arxivMatcher = TextUtilities.arXivPattern.matcher(pubnum); if (arxivMatcher.find()) { setArXivId(pubnum); setPubnum(null); } } // PMID if (!StringUtils.isEmpty(pubnum) && StringUtils.isEmpty(PMID)) { Matcher pmidMatcher = TextUtilities.pmidPattern.matcher(pubnum); if (pmidMatcher.find()) { // last group gives the PMID digits String digits = pmidMatcher.group(pmidMatcher.groupCount()); setPMID(digits); setPubnum(null); } } // PMC ID if (!StringUtils.isEmpty(pubnum) && StringUtils.isEmpty(PMCID)) { Matcher pmcidMatcher = TextUtilities.pmcidPattern.matcher(pubnum); if (pmcidMatcher.find()) { // last group gives the PMC ID digits, but the prefix PMC must be added to follow the NIH guidelines String digits = pmcidMatcher.group(pmcidMatcher.groupCount()); setPMCID("PMC"+digits); setPubnum(null); } } 
// ISSN if (!StringUtils.isEmpty(pubnum) && StringUtils.isEmpty(ISSN)) { if (pubnum.toLowerCase().indexOf("issn") != -1) { pubnum = pubnum.replace("issn", ""); pubnum = pubnum.replace("ISSN", ""); pubnum = TextUtilities.cleanField(pubnum, true); if (pubnum != null) setISSN(pubnum); setPubnum(null); } } // ISBN if (!StringUtils.isEmpty(pubnum) && StringUtils.isEmpty(ISBN13)) { if (pubnum.toLowerCase().indexOf("isbn") != -1) { pubnum = pubnum.replace("isbn", ""); pubnum = pubnum.replace("ISBN", ""); pubnum = TextUtilities.cleanField(pubnum, true); if (pubnum != null && pubnum.length() == 10) setISBN10(pubnum); else if (pubnum != null && pubnum.length() == 13) setISBN13(pubnum); setPubnum(null); } } // TODO: PII } /** * Export the bibliographical item into a TEI BiblStruct string * * @param n - the index of the bibliographical record, the corresponding id will be b+n */ public String toTEI(int n) { return toTEI(n, 0, GrobidAnalysisConfig.defaultInstance()); } /** * Export the bibliographical item into a TEI BiblStruct string * * @param n - the index of the bibliographical record, the corresponding id will be b+n */ public String toTEI(int n, GrobidAnalysisConfig config) { return toTEI(n, 0, config); } /** * Export the bibliographical item into a TEI BiblStruct string * * @param n - the index of the bibliographical record, the corresponding id will be b+n * @param indent - the tabulation indentation for the output of the xml elements */ public String toTEI(int n, int indent) { return toTEI(n, indent, GrobidAnalysisConfig.defaultInstance()); } /** * Export the bibliographical item into a TEI BiblStruct string * * @param n - the index of the bibliographical record, the corresponding id will be b+n * @param indent - the tabulation indentation for the output of the xml elements */ public String toTEI(int n, int indent, GrobidAnalysisConfig config) { StringBuilder tei = new StringBuilder(); boolean generateIDs = config.isGenerateTeiIds(); try { // we just produce here xml 
strings for (int i = 0; i < indent; i++) { tei.append("\t"); } tei.append("<biblStruct"); boolean withCoords = (config.getGenerateTeiCoordinates() != null) && (config.getGenerateTeiCoordinates().contains("biblStruct")); tei.append(" "); if (withCoords) tei.append(TEIFormatter.getCoordsAttribute(coordinates, withCoords)).append(" "); if (!StringUtils.isEmpty(language)) { if (n == -1) { tei.append("xml:lang=\"" + language + ">\n"); } else { teiId = "b" + n; tei.append("xml:lang=\"" + language + "\" xml:id=\"" + teiId + "\">\n"); } // TBD: we need to ensure that the language is normalized following xml lang attributes ! } else { if (n == -1) { tei.append(">\n"); } else { teiId = "b" + n; tei.append("xml:id=\"" + teiId + "\">\n"); } } boolean openAnalytic = false; if ( ((bookTitle == null) && (journal == null) && (serieTitle == null)) || ((bookTitle != null) && (title == null) && (articleTitle == null) && (journal == null) && (serieTitle == null)) ) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<monogr>\n"); } else { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<analytic>\n"); openAnalytic = true; } // title if (title != null) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<title"); if ((bookTitle == null) && (journal == null) && (serieTitle == null)) { tei.append(" level=\"m\" type=\"main\""); } else tei.append(" level=\"a\" type=\"main\""); if (generateIDs) { String divID = KeyGen.getKey().substring(0,7); tei.append(" xml:id=\"_" + divID + "\""); } // here check the language ? 
if (StringUtils.isEmpty(english_title)) { tei.append(">").append(TextUtilities.HTMLEncode(title)).append("</title>\n"); } else { tei.append(" xml:lang=\"").append(language) .append("\">").append(TextUtilities.HTMLEncode(title)).append("</title>\n"); } } else if (bookTitle == null) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<title/>\n"); } boolean hasEnglishTitle = false; if (english_title != null) { // here do check the language ! Language resLang = languageUtilities.runLanguageId(english_title); if (resLang != null) { String resL = resLang.getLang(); if (resL.equals(Language.EN)) { hasEnglishTitle = true; for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<title"); if ((bookTitle == null) && (journal == null)) { tei.append(" level=\"m\""); } else { tei.append(" level=\"a\""); } if (generateIDs) { String divID = KeyGen.getKey().substring(0,7); tei.append(" xml:id=\"_" + divID + "\""); } tei.append(" xml:lang=\"en\">") .append(TextUtilities.HTMLEncode(english_title)).append("</title>\n"); } } // if it's not something in English, we will write it anyway as note without type at the end } tei.append(toTEIAuthorBlock(2, config)); if (!StringUtils.isEmpty(doi)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<idno type=\"DOI\">" + TextUtilities.HTMLEncode(doi) + "</idno>\n"); } if (!StringUtils.isEmpty(arXivId)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<idno type=\"arXiv\">" + TextUtilities.HTMLEncode(arXivId) + "</idno>\n"); } if (!StringUtils.isEmpty(PMID)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<idno type=\"PMID\">" + TextUtilities.HTMLEncode(PMID) + "</idno>\n"); } if (!StringUtils.isEmpty(PMCID)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<idno type=\"PMCID\">" + TextUtilities.HTMLEncode(PMCID) + "</idno>\n"); } if (!StringUtils.isEmpty(PII)) { for (int i = 0; i < indent + 2; i++) { 
tei.append("\t"); } tei.append("<idno type=\"PII\">" + TextUtilities.HTMLEncode(PII) + "</idno>\n"); } if (!StringUtils.isEmpty(ark)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<idno type=\"ark\">" + TextUtilities.HTMLEncode(ark) + "</idno>\n"); } if (!StringUtils.isEmpty(istexId)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<idno type=\"istexId\">" + TextUtilities.HTMLEncode(istexId) + "</idno>\n"); } if (!StringUtils.isEmpty(pubnum)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<idno>").append(TextUtilities.HTMLEncode(pubnum)).append("</idno>\n"); } if (!StringUtils.isEmpty(oaUrl)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<ptr type=\"open-access\" target=\"").append(TextUtilities.HTMLEncode(oaUrl)).append("\" />\n"); } if (!StringUtils.isEmpty(web)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<ptr target=\"").append(TextUtilities.HTMLEncode(web)).append("\" />\n"); } if (openAnalytic) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("</analytic>\n"); for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<monogr>\n"); } if (bookTitle != null) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<title level=\"m\""); if (generateIDs) { String divID = KeyGen.getKey().substring(0,7); tei.append(" xml:id=\"_" + divID + "\""); } tei.append(">" + TextUtilities.HTMLEncode(bookTitle) + "</title>\n"); if (!StringUtils.isEmpty(serieTitle)) { // in case the book is part of an indicated series for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<title level=\"s\""); if (generateIDs) { String divID = KeyGen.getKey().substring(0,7); tei.append(" xml:id=\"_" + divID + "\""); } tei.append(">" + TextUtilities.HTMLEncode(serieTitle) + "</title>\n"); } if (fullEditors != null && fullEditors.size()>0) { for(Person editor : fullEditors) { for (int i = 0; i < 
indent + 2; i++) { tei.append("\t"); } tei.append("<editor>\n"); for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } String localString = editor.toTEI(false); localString = localString.replace(" xmlns=\"http://www.tei-c.org/ns/1.0\"", ""); tei.append(localString).append("\n"); for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("</editor>\n"); } } else if (!StringUtils.isEmpty(editors)) { //postProcessingEditors(); StringTokenizer st = new StringTokenizer(editors, ";"); if (st.countTokens() > 0) { while (st.hasMoreTokens()) { String editor = st.nextToken(); if (editor != null) editor = editor.trim(); for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<editor>" + TextUtilities.HTMLEncode(editor) + "</editor>\n"); } } else { if (editors != null) for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<editor>" + TextUtilities.HTMLEncode(editors) + "</editor>\n"); } } // in case the booktitle corresponds to a proceedings, we can try to indidate the meeting title String meeting = bookTitle; boolean meetLoc = false; if (event != null) meeting = event; else { meeting = meeting.trim(); for (String prefix : confPrefixes) { if (meeting.startsWith(prefix)) { meeting = meeting.replace(prefix, ""); meeting = meeting.trim(); meeting = TextUtilities.cleanField(meeting, false); for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<meeting>" + TextUtilities.HTMLEncode(meeting)); if ((location != null) || (town != null) || (country != null)) { tei.append("<address>"); if (town != null) { tei.append("<settlement>" + TextUtilities.HTMLEncode(town) + "</settlement>"); } if (country != null) { tei.append("<country>" + TextUtilities.HTMLEncode(country) + "</country>"); } if ((location != null) && (town == null) && (country == null)) { tei.append("<addrLine>" + TextUtilities.HTMLEncode(location) + "</addrLine>"); } tei.append("</address>"); meetLoc = true; } tei.append("</meeting>\n"); break; } //break; } } if 
(((location != null) || (town != null) || (country != null)) && (!meetLoc)) {
    // meeting address fallback when no conference prefix matched in the booktitle above
    for (int i = 0; i < indent + 2; i++) {
        tei.append("\t");
    }
    tei.append("<meeting>");
    tei.append("<address>");
    if (town != null) {
        // FIX: town/country were emitted unescaped here, unlike every other text node in this
        // serialization (including the parallel meeting block above); XML-encode them
        tei.append("<settlement>" + TextUtilities.HTMLEncode(town) + "</settlement>");
    }
    if (country != null) {
        tei.append("<country>" + TextUtilities.HTMLEncode(country) + "</country>");
    }
    if ((location != null) && (town == null) && (country == null)) {
        tei.append("<addrLine>" + TextUtilities.HTMLEncode(location) + "</addrLine>");
    }
    tei.append("</address>");
    tei.append("</meeting>\n");
}
for (int i = 0; i < indent + 2; i++) {
    tei.append("\t");
}
// open <imprint> only when at least one imprint child will be produced
if ((publication_date != null) || (pageRange != null) || (publisher != null) || (volumeBlock != null)) {
    tei.append("<imprint>\n");
} else
    tei.append("<imprint/>\n");
if (publisher != null) {
    for (int i = 0; i < indent + 3; i++) {
        tei.append("\t");
    }
    tei.append("<publisher>" + TextUtilities.HTMLEncode(publisher) + "</publisher>\n");
}
if (normalized_publication_date != null) {
    if (normalized_publication_date.getYear() != -1) {
        // structured date available: serialize as an ISO @when value
        String when = Date.toISOString(normalized_publication_date);
        if (when != null) {
            for (int i = 0; i < indent + 3; i++) {
                tei.append("\t");
            }
            tei.append("<date type=\"published\" when=\"");
            tei.append(when + "\"");
            if (publication_date != null && publication_date.length() > 0) {
                tei.append(">");
                tei.append(TextUtilities.HTMLEncode(publication_date) );
                tei.append("</date>\n");
            } else {
                tei.append(" />\n");
            }
        }
    } else if (this.getYear() != null) {
        // rebuild an ISO date from the raw year/month/day strings, zero-padding each component
        String when = "";
        if (this.getYear().length() == 1)
            when += "000" + this.getYear();
        else if (this.getYear().length() == 2)
            when += "00" + this.getYear();
        else if (this.getYear().length() == 3)
            when += "0" + this.getYear();
        else if (this.getYear().length() == 4)
            when += this.getYear();
        if (this.getMonth() != null) {
            if (this.getMonth().length() == 1)
                when += "-0" + this.getMonth();
            else
                when += "-" + this.getMonth();
            if (this.getDay() != null) {
                if (this.getDay().length() == 1)
                    when += "-0" + this.getDay();
                else when
+= "-" + this.getDay(); } } for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<date type=\"published\" when=\""); tei.append(when + "\""); if (publication_date != null && publication_date.length() > 0) { tei.append(">"); tei.append(TextUtilities.HTMLEncode(publication_date) ); tei.append("</date>\n"); } else { tei.append(" />\n"); } } else { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<date>" + TextUtilities.HTMLEncode(publication_date) + "</date>\n"); } } else if (publication_date != null) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<date>" + TextUtilities.HTMLEncode(publication_date) + "</date>\n"); } if (volumeBlock != null) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<biblScope unit=\"volume\">" + TextUtilities.HTMLEncode(volumeBlock) + "</biblScope>\n"); } if (!StringUtils.isEmpty(pageRange)) { StringTokenizer st = new StringTokenizer(pageRange, "--"); if (st.countTokens() == 2) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<biblScope unit=\"page\" from=\"" + TextUtilities.HTMLEncode(st.nextToken()) + "\" to=\"" + TextUtilities.HTMLEncode(st.nextToken()) + "\" />\n"); } else { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<biblScope unit=\"page\">" + TextUtilities.HTMLEncode(pageRange) + "</biblScope>\n"); } } if ((publication_date != null) || (pageRange != null) || (publisher != null) || (volumeBlock != null)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("</imprint>\n"); } } else if (!StringUtils.isEmpty(journal) || !StringUtils.isEmpty(serieTitle)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } if (!StringUtils.isEmpty(journal)) { tei.append("<title level=\"j\""); if (generateIDs) { String divID = KeyGen.getKey().substring(0,7); tei.append(" xml:id=\"_" + divID + "\""); } tei.append(">" + TextUtilities.HTMLEncode(journal) + "</title>\n"); if 
(!StringUtils.isEmpty(getJournalAbbrev())) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<title level=\"j\" type=\"abbrev\">" + TextUtilities.HTMLEncode(getJournalAbbrev()) + "</title>\n"); } } else if (!StringUtils.isEmpty(serieTitle)) { tei.append("<title level=\"s\""); if (generateIDs) { String divID = KeyGen.getKey().substring(0,7); tei.append(" xml:id=\"_" + divID + "\""); } tei.append(">" + TextUtilities.HTMLEncode(serieTitle) + "</title>\n"); } if (fullEditors != null && fullEditors.size()>0) { for(Person editor : fullEditors) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<editor>\n"); for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } String localString = editor.toTEI(false); localString = localString.replace(" xmlns=\"http://www.tei-c.org/ns/1.0\"", ""); tei.append(localString).append("\n"); for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("</editor>\n"); } } else if (!StringUtils.isEmpty(editors)) { //postProcessingEditors(); StringTokenizer st = new StringTokenizer(editors, ";"); if (st.countTokens() > 0) { while (st.hasMoreTokens()) { String editor = st.nextToken(); if (editor != null) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } editor = editor.trim(); tei.append("<editor>" + TextUtilities.HTMLEncode(editor) + "</editor>\n"); } } } else { if (!StringUtils.isEmpty(editors)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<editor>" + TextUtilities.HTMLEncode(editors) + "</editor>\n"); } } } if (!StringUtils.isEmpty(getISSN())) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<idno type=\"ISSN\">" + TextUtilities.HTMLEncode(getISSN()) + "</idno>\n"); } if (!StringUtils.isEmpty(getISSNe())) { if (!getISSNe().equals(getISSN())) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<idno type=\"ISSNe\">" + TextUtilities.HTMLEncode(getISSNe()) + "</idno>\n"); } } /*for (int i = 0; i < indent + 
2; i++) { tei.append("\t"); }*/
// journal/series imprint: emitted only when at least one child element exists
// FIX: the first operand used the non-short-circuit bitwise '|' operator; use logical '||'
// for consistency with the rest of the expression (identical truth table for booleans)
if ((volumeBlock != null) || (issue != null) || (pageRange != null) || (publication_date != null) || (publisher != null)) {
    for (int i = 0; i < indent + 2; i++) {
        tei.append("\t");
    }
    tei.append("<imprint>\n");
    if (volumeBlock != null) {
        for (int i = 0; i < indent + 3; i++) {
            tei.append("\t");
        }
        tei.append("<biblScope unit=\"volume\">" + TextUtilities.HTMLEncode(volumeBlock) + "</biblScope>\n");
    }
    if (issue != null) {
        for (int i = 0; i < indent + 3; i++) {
            tei.append("\t");
        }
        tei.append("<biblScope unit=\"issue\">" + TextUtilities.HTMLEncode(issue) + "</biblScope>\n");
    }
    if (pageRange != null) {
        // a "--"-separated range becomes @from/@to; otherwise the raw value is kept as text
        StringTokenizer st = new StringTokenizer(pageRange, "--");
        if (st.countTokens() == 2) {
            for (int i = 0; i < indent + 3; i++) {
                tei.append("\t");
            }
            tei.append("<biblScope unit=\"page\" from=\"" + TextUtilities.HTMLEncode(st.nextToken()) +
                    "\" to=\"" + TextUtilities.HTMLEncode(st.nextToken()) + "\" />\n");
        } else {
            for (int i = 0; i < indent + 3; i++) {
                tei.append("\t");
            }
            tei.append("<biblScope unit=\"page\">" + TextUtilities.HTMLEncode(pageRange) + "</biblScope>\n");
        }
    }
    // date
    if (normalized_publication_date != null) {
        if (normalized_publication_date.getYear() != -1) {
            // structured date available: serialize as an ISO @when value
            String when = Date.toISOString(normalized_publication_date);
            if (when != null) {
                for (int i = 0; i < indent + 3; i++) {
                    tei.append("\t");
                }
                tei.append("<date type=\"published\" when=\"");
                tei.append(when + "\"");
                if (publication_date != null && publication_date.length() > 0) {
                    tei.append(">");
                    tei.append(TextUtilities.HTMLEncode(publication_date) );
                    tei.append("</date>\n");
                } else {
                    tei.append(" />\n");
                }
            }
        } else if (this.getYear() != null) {
            // rebuild an ISO date from the raw year/month/day strings, zero-padding each component
            String when = "";
            if (this.getYear().length() == 1)
                when += "000" + this.getYear();
            else if (this.getYear().length() == 2)
                when += "00" + this.getYear();
            else if (this.getYear().length() == 3)
                when += "0" + this.getYear();
            else if (this.getYear().length() == 4)
                when += this.getYear();
            if (this.getMonth() != null) {
                if
(this.getMonth().length() == 1) when += "-0" + this.getMonth(); else when += "-" + this.getMonth(); if (this.getDay() != null) { if (this.getDay().length() == 1) when += "-0" + this.getDay(); else when += "-" + this.getDay(); } } for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<date type=\"published\" when=\""); tei.append(when + "\""); if (publication_date != null && publication_date.length() > 0) { tei.append(">"); tei.append(TextUtilities.HTMLEncode(publication_date) ); tei.append("</date>\n"); } else { tei.append(" />\n"); } } else { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<date>" + TextUtilities.HTMLEncode(publication_date) + "</date>\n"); } } else if (publication_date != null) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<date>" + TextUtilities.HTMLEncode(publication_date) + "</date>\n"); } if (getPublisher() != null) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<publisher>" + TextUtilities.HTMLEncode(getPublisher()) + "</publisher>\n"); } if (location != null && location.length()>0) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<pubPlace>" + TextUtilities.HTMLEncode(location) + "</pubPlace>\n"); } for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("</imprint>\n"); } else { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<imprint/>\n"); } } else { // not a journal and not something in a book... 
if (editors != null) { //postProcessingEditors(); StringTokenizer st = new StringTokenizer(editors, ";"); if (st.countTokens() > 0) { while (st.hasMoreTokens()) { String editor = st.nextToken(); if (editor != null) { editor = editor.trim(); for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<editor>" + TextUtilities.HTMLEncode(editor) + "</editor>\n"); } } } else { if (editors != null) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<editor>" + TextUtilities.HTMLEncode(editors) + "</editor>\n"); } } } for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } if ((publication_date != null) || (pageRange != null) || (location != null) || (publisher != null) || (volumeBlock != null)) { tei.append("<imprint>\n"); } else { tei.append("<imprint/>\n"); } // date if (normalized_publication_date != null) { if (normalized_publication_date.getYear() != -1) { String when = Date.toISOString(normalized_publication_date); if (when != null) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<date type=\"published\" when=\""); tei.append(when + "\""); if (publication_date != null && publication_date.length() > 0) { tei.append(">"); tei.append(TextUtilities.HTMLEncode(publication_date) ); tei.append("</date>\n"); } else { tei.append(" />\n"); } } } else if (this.getYear() != null) { String when = ""; if (this.getYear().length() == 1) when += "000" + this.getYear(); else if (this.getYear().length() == 2) when += "00" + this.getYear(); else if (this.getYear().length() == 3) when += "0" + this.getYear(); else if (this.getYear().length() == 4) when += this.getYear(); if (this.getMonth() != null) { if (this.getMonth().length() == 1) when += "-0" + this.getMonth(); else when += "-" + this.getMonth(); if (this.getDay() != null) { if (this.getDay().length() == 1) when += "-0" + this.getDay(); else when += "-" + this.getDay(); } } for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<date type=\"published\" 
when=\""); tei.append(when + "\""); if (publication_date != null && publication_date.length() > 0) { tei.append(">"); tei.append(TextUtilities.HTMLEncode(publication_date) ); tei.append("</date>\n"); } else { tei.append(" />\n"); } } else { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<date>" + TextUtilities.HTMLEncode(publication_date) + "</date>\n"); } } else if (publication_date != null) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<date>" + TextUtilities.HTMLEncode(publication_date) + "</date>\n"); } if (publisher != null) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<publisher>" + TextUtilities.HTMLEncode(publisher) + "</publisher>\n"); } if (volumeBlock != null) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<biblScope unit=\"volume\">" + TextUtilities.HTMLEncode(volumeBlock) + "</biblScope>\n"); } if (pageRange != null) { StringTokenizer st = new StringTokenizer(pageRange, "--"); if (st.countTokens() == 2) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<biblScope unit=\"page\" from=\"" + TextUtilities.HTMLEncode(st.nextToken()) + "\" to=\"" + TextUtilities.HTMLEncode(st.nextToken()) + "\" />\n"); } else { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<biblScope unit=\"page\">" + TextUtilities.HTMLEncode(pageRange) + "</biblScope>\n"); } } if (location != null) { for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } tei.append("<pubPlace>" + TextUtilities.HTMLEncode(location) + "</pubPlace>\n"); } if ((publication_date != null) || (pageRange != null) || (location != null) || (publisher != null) || (volumeBlock != null)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("</imprint>\n"); } } if (!StringUtils.isEmpty(institution)) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<respStmt>\n"); for (int i = 0; i < indent + 3; i++) { tei.append("\t"); } 
tei.append("<orgName>" + TextUtilities.HTMLEncode(institution) + "</orgName>\n"); for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("</respStmt>\n"); } for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("</monogr>\n"); if (submission != null) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<note type=\"submission\">" + TextUtilities.HTMLEncode(submission) + "</note>\n"); } if (getSubmissionDate() != null) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<date type=\"submission\">" + TextUtilities.HTMLEncode(getSubmissionDate()) + "</date>\n"); } if (getDownloadDate() != null) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<date type=\"download\">" + TextUtilities.HTMLEncode(getDownloadDate()) + "</date>\n"); } if (dedication != null) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<note type=\"dedication\">" + TextUtilities.HTMLEncode(dedication) + "</note>\n"); } if (book_type != null) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<note type=\"report_type\">" + TextUtilities.HTMLEncode(book_type) + "</note>\n"); } if (note != null) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<note>" + TextUtilities.HTMLEncode(note) + "</note>\n"); } if ((english_title != null) && (!hasEnglishTitle)) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<note>" + TextUtilities.HTMLEncode(english_title) + "</note>\n"); } if (subjects != null) { if (subjects.size() > 0) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<keywords scheme=\"hal\"><list>\n"); for (String subject : subjects) { for (int i = 0; i < indent + 2; i++) { tei.append("\t"); } tei.append("<item>" + TextUtilities.HTMLEncode(subject) + "</item>\n"); } tei.append("</list></keywords>\n"); } } // keywords here !! 
if (!StringUtils.isEmpty(getKeyword())) { String keywords = getKeyword(); if (keywords.startsWith("Categories and Subject Descriptors")) { int start = keywords.indexOf("Keywords"); if (start != -1) { String keywords1 = keywords.substring(0, start - 1); String keywords2 = keywords.substring(start + 9, keywords.length()); for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<keywords type=\"subject-headers\">" + TextUtilities.HTMLEncode(keywords1) + "</keywords>\n"); for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<keywords>" + TextUtilities.HTMLEncode(keywords2) + "</keywords>\n"); } else { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<keywords>" + TextUtilities.HTMLEncode(getKeyword()) + "</keywords>\n"); } } else for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<keywords>" + TextUtilities.HTMLEncode(getKeyword()) + "</keywords>\n"); } if (uri != null) { if (uri.startsWith("http://hal.")) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<idno type=\"HALid\">" + TextUtilities.HTMLEncode(uri) + "</idno>\n"); } else { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<idno>" + TextUtilities.HTMLEncode(uri) + "</idno>\n"); } } if (url != null) { if (url.startsWith("http://hal.")) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<idno type=\"HALFile\">" + TextUtilities.HTMLEncode(url) + "</idno>\n"); } } if (abstract_ != null) { if (abstract_.length() > 0) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } tei.append("<div type=\"abstract\">" + TextUtilities.HTMLEncode(abstract_) + "</div>\n"); } } if (config.getIncludeRawCitations() && !StringUtils.isEmpty(reference) ) { for (int i = 0; i < indent + 1; i++) { tei.append("\t"); } String localReference = TextUtilities.HTMLEncode(reference); localReference = localReference.replace("\n", " "); localReference = localReference.replaceAll("( )+", " "); 
// --- toTEI() end: raw reference note, closing of </biblStruct>; any failure is wrapped
// into a GrobidException. ---
tei.append("<note type=\"raw_reference\">" + localReference + "</note>\n"); } for (int i = 0; i < indent; i++) { tei.append("\t"); } tei.append("</biblStruct>\n"); } catch (Exception e) { throw new GrobidException("Cannot convert bibliographical item into a TEI, " + "because of nested exception.", e); } return tei.toString(); }
/**
 * Export the bibliographical item into OpenURL 1.0.
 * Builds a KEV (key/encoded-value) ContextObject query string. When a DOI is known the
 * method returns early with a DOI-only context, which is sufficient for link resolvers.
 * NOTE(review): the "authors" parameter shadows the field of the same name and is only
 * used as a non-null flag further down; the surname actually comes from
 * getFirstAuthorSurname(). Line-wrap artifacts had turned several "//" comments into
 * comments swallowing the following code; they are restored below as block comments,
 * code tokens unchanged.
 */
public String toOpenURL(String authors) { String openurl = ""; try { /* general - independent from the type of bibliographical object */ /* openurl += "url_ver=Z39.88-2004"; */ openurl += "ctx_ver=Z39.88-2004"; if (doi != null) { /* openurl += "&rft.doi=" + HTMLEncode(DOI); */ openurl += "&rft_id=info:doi/" + URLEncoder.encode(doi, "UTF-8"); /* openurl += "&rft.doi=" + URLEncoder.encode(DOI,"UTF-8"); */ /* we can finish here */ openurl += "&url_ctx_fmt=info:ofi/fmt:kev:mtx:ctx&rft.genre=article "; return openurl; } /* journal */ if ((bookTitle != null) || (journal != null)) { if (journal != null) openurl += "&rft_val_fmt=info:ofi/fmt:kev:mtx:journal"; openurl += "&rft.genre=article"; // ? always to be written ?
// journal/book-part fields: issn, article title, journal or book title, volume, issue,
// and the start/end pages obtained by splitting the page range on "--"
if (ISSN != null) openurl += "&rft.issn=" + URLEncoder.encode(ISSN, "UTF-8"); if (title != null) openurl += "&rft.atitle=" + URLEncoder.encode(title, "UTF-8"); if (journal != null) openurl += "&rft.jtitle=" + URLEncoder.encode(journal, "UTF-8"); else if (bookTitle != null) openurl += "&rft.btitle=" + URLEncoder.encode(bookTitle, "UTF-8"); if (volumeBlock != null) openurl += "&rft.volume=" + URLEncoder.encode(volumeBlock, "UTF-8"); if (issue != null) openurl += "&rft.issue=" + URLEncoder.encode(issue, "UTF-8"); if (pageRange != null) { StringTokenizer st = new StringTokenizer(pageRange, "--"); if (st.countTokens() > 0) { if (st.hasMoreTokens()) { String spage = st.nextToken(); openurl += "&rft.spage=" + URLEncoder.encode(spage, "UTF-8"); } if (st.hasMoreTokens()) { String epage = st.nextToken(); openurl += "&rft.epage=" + URLEncoder.encode(epage, "UTF-8"); } } } } else { /* book */ openurl += "&rft_val_fmt=info:ofi/fmt:kev:mtx:book"; if (ISBN13 != null) openurl += "&rft.isbn=" + URLEncoder.encode(ISBN13, "UTF-8"); if (title != null) openurl += "&rft.genre=book&rft.btitle=" + URLEncoder.encode(title, "UTF-8"); } if (publication_date != null) openurl += "&rft.date=" + URLEncoder.encode(publication_date, "UTF-8"); /* year is enough! */ /* authors */ if (authors != null) { String localAuthor = getFirstAuthorSurname(); if (localAuthor != null) { openurl += "&rft.aulast=" + URLEncoder.encode(localAuthor, "UTF-8"); } } openurl += "&url_ctx_fmt=info:ofi/fmt:kev:mtx:ctx"; } catch (Exception e) { throw new GrobidException("Cannot open url to DOI, because of nested exception.", e); } return openurl; }
/** * Export the bibliographical item into a COinS (OpenURL ContextObject in SPAN). */
public String toCOinS() { String res = "<span class=\"Z3988\" title=\"" + toOpenURL(authors) + "\"></span>"; return res; }
/** * Export the bibliographical item into an OpenURL with given link resolver address.
*/
// Builds a clickable HTML link through the given link resolver, showing the resolver image.
public String toFullOpenURL(String linkResolver, String imageLinkResolver) { String res = "<a href=\"" + linkResolver + toOpenURL(authors) + "\" target=\"_blank\"><img src=\"" + imageLinkResolver + "\"/></a>"; return res; }
// Setter for the cached first-author surname used by getFirstAuthorSurname().
public void setFirstAuthorSurname(String firstAuthorSurname) { this.firstAuthorSurname = firstAuthorSurname; }
/** * Return the surname of the first author. */
// Resolution order: cached value, then the first Person of fullAuthors, then a heuristic
// on the raw "authors" string (first ';'-separated token, last space-separated word).
// Returns null when nothing is available. NOTE(review): commented-out "//return ..." lines
// had swallowed following code in this flattened chunk; restored as block comments.
public String getFirstAuthorSurname() { if (this.firstAuthorSurname != null) { return this.firstAuthorSurname; /* return TextUtilities.HTMLEncode(this.firstAuthorSurname); */ } if (fullAuthors != null) { if (fullAuthors.size() > 0) { Person aut = fullAuthors.get(0); String sur = aut.getLastName(); if (sur != null) { if (sur.length() > 0) { this.firstAuthorSurname = sur; /* return TextUtilities.HTMLEncode(sur); */ return sur; } } } } if (authors != null) { StringTokenizer st = new StringTokenizer(authors, ";"); if (st.countTokens() > 0) { if (st.hasMoreTokens()) { /* we take just the first author */ String author = st.nextToken(); if (author != null) author = author.trim(); int ind = author.lastIndexOf(" "); if (ind != -1) { this.firstAuthorSurname = author.substring(ind + 1); /* return TextUtilities.HTMLEncode(author.substring(ind + 1)); */ return author.substring(ind + 1); } else { this.firstAuthorSurname = author; /* return TextUtilities.HTMLEncode(author); */ return author; } } } } return null; }
/** * Attach existing recognized emails to authors (default) or editors */
public void attachEmails() { attachEmails(fullAuthors); }
// Splits the recognized "email" field (separated by " and "), sanitizes the parts and
// delegates the author/email matching to authorEmailAssigner.
public void attachEmails(List<Person> folks) { /* do we have an email field recognized? */ if (email == null) return; /* we check if we have several emails in the field */ email = email.trim(); email = email.replace(" and ", "\t"); ArrayList<String> emailles = new ArrayList<String>(); StringTokenizer st0 = new StringTokenizer(email, "\t"); while (st0.hasMoreTokens()) { emailles.add(st0.nextToken().trim()); } List<String> sanitizedEmails = emailSanitizer.splitAndClean(emailles); if (sanitizedEmails != null) { authorEmailAssigner.assign(folks, sanitizedEmails); } }
/** * Attach existing recognized emails to authors */
public void attachAuthorEmails() { attachEmails(fullAuthors); }
/** * Attach existing recognized emails to editors */
public void attachEditorEmails() { attachEmails(fullEditors); }
/** * Attach existing recognized affiliations to authors */
// Strategy: a single affiliation goes to every author; several affiliations with a single
// author all go to that author; otherwise affiliation markers (digits, letters, '*' or
// '**') are located in the original author string, with guards against a one-character
// marker matching inside a two-character one, and each affiliation is attached to the
// author whose last name sits closest before the marker occurrence.
// NOTE(review): the "if" keyword after "case with ** as marker" had been swallowed by the
// flattened comment; restored as code below.
public void attachAffiliations() { if (fullAffiliations == null) { return; } if (fullAuthors == null) { return; } int nbAffiliations = fullAffiliations.size(); int nbAuthors = fullAuthors.size(); boolean hasMarker = false; /* do we have markers in the affiliations? */ for (Affiliation aff : fullAffiliations) { if (aff.getMarker() != null) { hasMarker = true; break; } } if (nbAffiliations == 1) { /* we distribute this affiliation to each author */ Affiliation aff = fullAffiliations.get(0); for (Person aut : fullAuthors) { aut.addAffiliation(aff); } aff.setFailAffiliation(false); } else if ((nbAuthors == 1) && (nbAffiliations > 1)) { /* we put all the affiliations to the single author */ Person auth = fullAuthors.get(0); for (Affiliation aff : fullAffiliations) { auth.addAffiliation(aff); aff.setFailAffiliation(false); } } else if (hasMarker) { /* we get the marker for each affiliation and try to find the related author in the original author field */ for (Affiliation aff : fullAffiliations) { if (aff.getMarker() != null) { String marker = aff.getMarker(); int from = 0; int ind = 0; ArrayList<Integer> winners = new ArrayList<Integer>(); while (ind != -1) { ind = originalAuthors.indexOf(marker, from); boolean bad = false; if (ind != -1) { /* we check if we have a digit/letter (1) matching incorrectly a double digit/letter (11), or a special non-digit (*) matching incorrectly a double special non-digit (**) */ if (marker.length() == 1) { if (Character.isDigit(marker.charAt(0))) { if (ind - 1 > 0) { if (Character.isDigit(originalAuthors.charAt(ind - 1))) { bad = true; } } if (ind + 1 < originalAuthors.length()) { if (Character.isDigit(originalAuthors.charAt(ind + 1))) { bad = true; } } } else if (Character.isLetter(marker.charAt(0))) { if (ind - 1 > 0) { if (Character.isLetter(originalAuthors.charAt(ind - 1))) { bad = true; } } if (ind + 1 < originalAuthors.length()) { if (Character.isLetter(originalAuthors.charAt(ind + 1))) { bad = true; } } } else if (marker.charAt(0) == '*') { if (ind - 1 > 0) { if (originalAuthors.charAt(ind - 1) == '*') { bad = true; } } if (ind + 1 < originalAuthors.length()) { if (originalAuthors.charAt(ind + 1) == '*') { bad = true; } } } } if (marker.length() == 2) { /* case with ** as marker */ if ((marker.charAt(0) == '*') && (marker.charAt(1) == '*')) { if (ind - 2 > 0) { if ((originalAuthors.charAt(ind - 1) == '*') && (originalAuthors.charAt(ind - 2) == '*')) { bad = true; } } if (ind + 2 < originalAuthors.length()) { if ((originalAuthors.charAt(ind + 1) == '*') && (originalAuthors.charAt(ind + 2) == '*')) { bad = true; } } if ((ind - 1 > 0) && (ind + 1 < originalAuthors.length())) { if ((originalAuthors.charAt(ind - 1) == '*') && (originalAuthors.charAt(ind + 1) == '*')) { bad = true; } } } } } if ((ind != -1) && !bad) { /* we find the associated author name */ String original = originalAuthors.toLowerCase(); int p = 0; int best = -1; int ind2 = -1; int bestDistance = 1000; for (Person aut : fullAuthors) { if (!winners.contains(Integer.valueOf(p))) { String lastname = aut.getLastName(); if (lastname != null) { lastname = lastname.toLowerCase(); ind2 = original.indexOf(lastname, ind2 + 1); int dist = Math.abs(ind - (ind2 + lastname.length())); if (dist < bestDistance) { best = p; bestDistance = dist; } } } p++; } /* and we associate this affiliation to this author */ if (best != -1) { fullAuthors.get(best).addAffiliation(aff); aff.setFailAffiliation(false); winners.add(Integer.valueOf(best)); } from = ind + 1; } if (bad) { from = ind + 1; bad = false; } } } } } /*else if (nbAuthors == nbAffiliations) { // risky heuristics, we distribute in this case one affiliation per author // preserving author // sometimes 2 affiliations belong both to 2 authors, for these case, the layout // positioning should be studied for (int p = 0; p < nbAuthors; p++) { fullAuthors.get(p).addAffiliation(fullAffiliations.get(p)); System.out.println("attachment: " + p); System.out.println(fullAuthors.get(p)); fullAffiliations.get(p).setFailAffiliation(false); } }*/ }
/** * Create the TEI encoding for the author+affiliation block for the current biblio object.
*/
// Convenience overload using the default analysis configuration.
public String toTEIAuthorBlock(int nbTag) { return toTEIAuthorBlock(nbTag, GrobidAnalysisConfig.defaultInstance()); }
/** * Create the TEI encoding for the author+affiliation block for the current biblio object. */
// nbTag is the base tab-indentation level. When only a collaboration is known it plays
// both the author and affiliation roles and the method returns early. Otherwise each
// author is serialized as <author> (with role="corresp" when exactly one author carries
// an email), including persName, email, ORCID and affiliations. NOTE(review): "//"
// comments that had swallowed following code in this flattened chunk are restored as
// block comments; code tokens unchanged.
public String toTEIAuthorBlock(int nbTag, GrobidAnalysisConfig config) { StringBuffer tei = new StringBuffer(); int nbAuthors = 0; int nbAffiliations = 0; int nbAddresses = 0; boolean withCoordinates = false; if (config != null && config.getGenerateTeiCoordinates() != null) { withCoordinates = config.getGenerateTeiCoordinates().contains("persName"); } if ( (collaboration != null) && ( (fullAuthors == null) || (fullAuthors.size() == 0) ) ) { /* collaboration plays at the same time the role of author and affiliation */ TextUtilities.appendN(tei, '\t', nbTag); tei.append("<author>").append("\n"); TextUtilities.appendN(tei, '\t', nbTag+1); tei.append("<orgName type=\"collaboration\""); if (withCoordinates && (labeledTokens != null) ) { List<LayoutToken> collabTokens = labeledTokens.get("<collaboration>"); if (withCoordinates && (collabTokens != null) && (!collabTokens.isEmpty())) { tei.append(" coords=\"" + LayoutTokensUtil.getCoordsString(collabTokens) + "\""); } } tei.append(">").append(TextUtilities.HTMLEncode(collaboration)).append("</orgName>").append("\n"); TextUtilities.appendN(tei, '\t', nbTag); tei.append("</author>").append("\n"); return tei.toString(); } List<Person> auts = fullAuthors; Lexicon lexicon = Lexicon.getInstance(); List<Affiliation> affs = fullAffiliations; if (affs == null) nbAffiliations = 0; else nbAffiliations = affs.size(); if (auts == null) nbAuthors = 0; else nbAuthors = auts.size(); boolean failAffiliation = true; /* if (getAuthors() != null) { */ if (auts != null) { failAffiliation = false; if (nbAuthors > 0) { int autRank = 0; int contactAut = -1; /* check if we have a single author of contact */ for (Person author : auts) { if (author.getEmail() != null) { if (contactAut == -1) contactAut = autRank; else { contactAut = -1; break; } } autRank++; } autRank = 0; for (Person author : auts) { if (author.getLastName() != null) { if (author.getLastName().length() < 2) continue; } if ( (author.getFirstName() == null) && (author.getMiddleName() == null) && (author.getLastName() == null) ) { continue; } TextUtilities.appendN(tei, '\t', nbTag); tei.append("<author"); if (autRank == contactAut) { tei.append(" role=\"corresp\">\n"); } else tei.append(">\n"); TextUtilities.appendN(tei, '\t', nbTag + 1); String localString = author.toTEI(withCoordinates); localString = localString.replace(" xmlns=\"http://www.tei-c.org/ns/1.0\"", ""); tei.append(localString).append("\n"); if (author.getEmail() != null) { TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("<email>" + TextUtilities.HTMLEncode(author.getEmail()) + "</email>\n"); } if (author.getORCID() != null) { TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("<idno type=\"ORCID\">" + TextUtilities.HTMLEncode(author.getORCID()) + "</idno>\n"); } if (author.getAffiliations() != null) { for (Affiliation aff : author.getAffiliations()) { this.appendAffiliation(tei, nbTag + 1, aff, config, lexicon); } } else if (collaboration != null) { TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("<affiliation>\n"); TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<orgName type=\"collaboration\">" + TextUtilities.HTMLEncode(collaboration) + "</orgName>\n"); TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("</affiliation>\n"); } TextUtilities.appendN(tei, '\t', nbTag); tei.append("</author>\n"); autRank++; } } }
// if the affiliations were not outputted with the authors, we add them here (better than nothing!)
if (affs != null) { for (Affiliation aff : affs) { if (aff.getFailAffiliation()) { /* dummy <author> for TEI conformance */ TextUtilities.appendN(tei, '\t', nbTag); tei.append("<author>\n"); this.appendAffiliation(tei, nbTag + 1, aff, config, lexicon); TextUtilities.appendN(tei, '\t', nbTag); tei.append("</author>\n"); } } } else if (affiliation != null) { StringTokenizer st2 = new StringTokenizer(affiliation, ";"); int affiliationRank = 0; while (st2.hasMoreTokens()) { String aff = st2.nextToken(); TextUtilities.appendN(tei, '\t', nbTag); tei.append("<author>\n"); TextUtilities.appendN(tei, '\t', nbTag+1); tei.append("<affiliation>\n"); TextUtilities.appendN(tei, '\t', nbTag+2); tei.append("<orgName>" + TextUtilities.HTMLEncode(aff) + "</orgName>\n"); if (nbAddresses == nbAffiliations) { int addressRank = 0; if (address != null) { StringTokenizer st3 = new StringTokenizer(address, ";"); while (st3.hasMoreTokens()) { String add = st3.nextToken(); if (addressRank == affiliationRank) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<address><addrLine>" + TextUtilities.HTMLEncode(add) + "</addrLine></address>\n"); break; } addressRank++; } } } TextUtilities.appendN(tei, '\t', nbTag+1); tei.append("</affiliation>\n"); TextUtilities.appendN(tei, '\t', nbTag); tei.append("</author>\n"); affiliationRank++; } } return tei.toString(); }
// Serializes a single <affiliation> element: optional raw affiliation note (with marker
// label), department/laboratory/institution orgNames (keyed when several), and the
// address sub-element with a lexicon-resolved country code when available.
private void appendAffiliation( StringBuffer tei, int nbTag, Affiliation aff, GrobidAnalysisConfig config, Lexicon lexicon ) { TextUtilities.appendN(tei, '\t', nbTag); tei.append("<affiliation"); if (aff.getKey() != null) tei.append(" key=\"").append(aff.getKey()).append("\""); tei.append(">\n"); if ( config.getIncludeRawAffiliations() && !StringUtils.isEmpty(aff.getRawAffiliationString()) ) { TextUtilities.appendN(tei, '\t', nbTag + 1); String encodedRawAffiliationString = TextUtilities.HTMLEncode( aff.getRawAffiliationString() ); tei.append("<note type=\"raw_affiliation\">"); LOGGER.debug("marker: {}", aff.getMarker()); if (StringUtils.isNotEmpty(aff.getMarker())) { tei.append("<label>"); tei.append(TextUtilities.HTMLEncode(aff.getMarker())); tei.append("</label> "); } tei.append(encodedRawAffiliationString); tei.append("</note>\n"); } if (aff.getDepartments() != null) { if (aff.getDepartments().size() == 1) { TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("<orgName type=\"department\">" + TextUtilities.HTMLEncode(aff.getDepartments().get(0)) + "</orgName>\n"); } else { int q = 1; for (String depa : aff.getDepartments()) { TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("<orgName type=\"department\" key=\"dep" + q + "\">" + TextUtilities.HTMLEncode(depa) + "</orgName>\n"); q++; } } } if (aff.getLaboratories() != null) { if (aff.getLaboratories().size() == 1) { TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("<orgName type=\"laboratory\">" + TextUtilities.HTMLEncode(aff.getLaboratories().get(0)) + "</orgName>\n"); } else { int q = 1; for (String labo : aff.getLaboratories()) { TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("<orgName type=\"laboratory\" key=\"lab" + q + "\">" + TextUtilities.HTMLEncode(labo) + "</orgName>\n"); q++; } } } if (aff.getInstitutions() != null) { if (aff.getInstitutions().size() == 1) { TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("<orgName type=\"institution\">" + TextUtilities.HTMLEncode(aff.getInstitutions().get(0)) + "</orgName>\n"); } else { int q = 1; for (String inst : aff.getInstitutions()) { TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("<orgName type=\"institution\" key=\"instit" + q + "\">" + TextUtilities.HTMLEncode(inst) + "</orgName>\n"); q++; } } } if ((aff.getAddressString() != null) || (aff.getAddrLine() != null) || (aff.getPostBox() != null) || (aff.getPostCode() != null) || (aff.getSettlement() != null) || (aff.getRegion() != null) || (aff.getCountry() != null)) { TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("<address>\n"); if (aff.getAddressString() != null) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<addrLine>" + TextUtilities.HTMLEncode(aff.getAddressString()) + "</addrLine>\n"); } if (aff.getAddrLine() != null) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<addrLine>" + TextUtilities.HTMLEncode(aff.getAddrLine()) + "</addrLine>\n"); } if (aff.getPostBox() != null) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<postBox>" + TextUtilities.HTMLEncode(aff.getPostBox()) + "</postBox>\n"); } if (aff.getPostCode() != null) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<postCode>" + TextUtilities.HTMLEncode(aff.getPostCode()) + "</postCode>\n"); } if (aff.getSettlement() != null) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<settlement>" + TextUtilities.HTMLEncode(aff.getSettlement()) + "</settlement>\n"); } if (aff.getRegion() != null) { TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<region>" + TextUtilities.HTMLEncode(aff.getRegion()) + "</region>\n"); } if (aff.getCountry() != null) { String code = lexicon.getCountryCode(aff.getCountry()); TextUtilities.appendN(tei, '\t', nbTag + 2); tei.append("<country"); if (code != null) tei.append(" key=\"" + code + "\""); tei.append(">" + TextUtilities.HTMLEncode(aff.getCountry()) + "</country>\n"); } TextUtilities.appendN(tei, '\t', nbTag + 1); tei.append("</address>\n"); } TextUtilities.appendN(tei, '\t', nbTag); tei.append("</affiliation>\n"); }
// Page-number patterns: an optional one-letter prefix (or 'e') and an optional one-letter
// postfix around the digits. NOTE(review): "volatile" on these constants is unusual for
// immutable statics — presumably legacy; confirm before changing.
private static volatile String possiblePreFixPageNumber = "[A-Ze]?"; private static volatile String possiblePostFixPageNumber = "[A-Z]?"; private static volatile Pattern page = Pattern.compile("("+possiblePreFixPageNumber+"\\d+"+possiblePostFixPageNumber+")"); private static volatile Pattern pageDigits = Pattern.compile("\\d+");
/** * Try to normalize the page range, which can be expressed in abbreviated forms and with letter prefix.
*/
// Rewrites this.pageRange in place: parses the first and last page tokens (keeping track
// of alphabetic prefixes like "L74" or postfixes like "143D"), then reconstructs
// abbreviated end pages (e.g. "433-8" or "433-41") either by substitution of the trailing
// digits or by addition, depending on heuristics over the magnitude of the end page.
// NOTE(review): "//" comments that had swallowed following code in this flattened chunk
// are restored as block comments; code tokens unchanged.
public void postProcessPages() { if (pageRange != null) { Matcher matcher = page.matcher(pageRange); if (matcher.find()) { /* below for the string form of the page numbers */ String firstPage = null; String lastPage = null; /* alphaPrefix or alphaPostfix are for storing possible alphabetical prefix or postfix to page number, e.g. "L" in Smith, G. P., Mazzotta, P., Okabe, N., et al. 2016, MNRAS, 456, L74 or "D" in "Am J Cardiol. 1999, 83:143D-150D. 10.1016/S0002-9149(98)01016-9" */ String alphaPrefixStart = null; String alphaPrefixEnd = null; String alphaPostfixStart = null; String alphaPostfixEnd = null; /* below for the integer form of the page numbers (part in case alphaPrefix is not null) */ int beginPage = -1; int endPage = -1; if (matcher.groupCount() > 0) { firstPage = matcher.group(0); } if (firstPage != null) { try { beginPage = Integer.parseInt(firstPage); } catch (Exception e) { beginPage = -1; } if (beginPage != -1) { pageRange = "" + beginPage; } else { pageRange = firstPage; /* try to get the numerical part of the page number, useful for later */ Matcher matcher2 = pageDigits.matcher(firstPage); if (matcher2.find()) { try { beginPage = Integer.parseInt(matcher2.group()); if (firstPage.length() > 0) { alphaPrefixStart = firstPage.substring(0,1); /* is it really alphabetical character? */ if (!Pattern.matches(possiblePreFixPageNumber, alphaPrefixStart)) { alphaPrefixStart = null; /* look at postfix */ alphaPostfixStart = firstPage.substring(firstPage.length()-1,firstPage.length()); if (!Pattern.matches(possiblePostFixPageNumber, alphaPostfixStart)) { alphaPostfixStart = null; } } } } catch (Exception e) { beginPage = -1; } } } if (matcher.find()) { if (matcher.groupCount() > 0) { lastPage = matcher.group(0); } if (lastPage != null) { try { endPage = Integer.parseInt(lastPage); } catch (Exception e) { endPage = -1; } if (endPage == -1) { /* try to get the numerical part of the page number, to be used for later */ Matcher matcher2 = pageDigits.matcher(lastPage); if (matcher2.find()) { try { endPage = Integer.parseInt(matcher2.group()); if (lastPage.length() > 0) { alphaPrefixEnd = lastPage.substring(0,1); /* is it really alphabetical character? */ if (!Pattern.matches(possiblePreFixPageNumber, alphaPrefixEnd)) { alphaPrefixEnd = null; /* look at postfix */ alphaPostfixEnd = lastPage.substring(lastPage.length()-1,lastPage.length()); if (!Pattern.matches(possiblePostFixPageNumber, alphaPostfixEnd)) { alphaPostfixEnd = null; } } } } catch (Exception e) { endPage = -1; } } } if ( (endPage != -1) && (endPage < beginPage)) { /* there are two possibilities: - the substitution, e.g. 433-8 -> 433--438, for example American Medical Association citation style - the addition, e.g. 433-8 -> 433--441; unfortunately, it depends on the citation style, so we try to guess/refine the re-composition of pages */ if (endPage >= 50) { /* we assume no journal articles have more than 49 pages and is expressed as addition, so it's a substitution */ int upperBound = firstPage.length() - lastPage.length(); if (upperBound<firstPage.length() && upperBound > 0) lastPage = firstPage.substring(0, upperBound) + lastPage; pageRange += "--" + lastPage; } else { if (endPage < 10) { /* case 1 digit for endPage */ /* last digit of begin page */ int lastDigitBeginPage = beginPage % 10; /* if digit of lastPage lower than last digit of beginPage, it's an addition for sure */ if (endPage < lastDigitBeginPage) endPage = beginPage + endPage; else { /* otherwise defaulting to substitution */ endPage = beginPage - lastDigitBeginPage + endPage; } } else if (endPage < 50) { /* case 2 digit for endPage, we apply a similar heuristics */ int lastDigitBeginPage = beginPage % 100; if (endPage < lastDigitBeginPage) endPage = beginPage + endPage; else { /* otherwise defaulting to substitution */ endPage = beginPage - lastDigitBeginPage + endPage; } } /* we assume there is no article of more than 99 pages expressed in this abbreviated way (which are for journal articles only, so short animals) */ if (alphaPrefixEnd != null) pageRange += "--" + alphaPrefixEnd + endPage; else if (alphaPostfixEnd != null) pageRange += "--" + endPage + alphaPostfixEnd; else pageRange += "--" + endPage; } } else if ((endPage != -1)) { if (alphaPrefixEnd != null) pageRange += "--" + alphaPrefixEnd + endPage; else if (alphaPostfixEnd != null) pageRange += "--" + endPage + alphaPostfixEnd; else pageRange += "--" + lastPage; } else { pageRange += "--" + lastPage; } } } } } }
/** * Correct/add identifiers of the first biblio item based on the second one */
public static void injectIdentifiers(BiblioItem destination, BiblioItem source) { destination.setDOI(source.getDOI()); // optionally associated strong identifiers are also injected
// --- injectIdentifiers() continuation: copy the remaining strong identifiers from source
// to destination. NOTE(review): a flattening artifact had left the comment word
// "injected" as a stray code token at the start of this fragment; it belongs to the
// comment ending the previous line and is dropped here. ---
destination.setPMID(source.getPMID()); destination.setPMCID(source.getPMCID()); destination.setPII(source.getPII()); destination.setIstexId(source.getIstexId()); destination.setArk(source.getArk()); }
/** * Correct fields of the first biblio item based on the second one and the reference string * * @param bib extracted from document * @param bibo fetched from metadata provider (biblioglutton, crossref..) */
// Field-by-field merge: every non-null field of bibo overwrites bib; journal/bookTitle
// are kept mutually consistent so the corrected item keeps a single document type; date
// fields are merged rather than replaced when both sides have a normalized date; author
// lists are reconciled by last-name (and initial) matching, preferring the metadata
// provider while keeping PDF-only extras such as affiliation blocks and layout tokens.
// NOTE(review): "//" comments that had swallowed following code in this flattened chunk
// are restored as block comments; code tokens unchanged.
public static void correct(BiblioItem bib, BiblioItem bibo) { /* System.out.println("correct: \n" + bib.toTEI(0)); */ /* System.out.println("with: \n" + bibo.toTEI(0)); */ if (bibo.getDOI() != null) bib.setDOI(bibo.getDOI()); if (bibo.getPMID() != null) bib.setPMID(bibo.getPMID()); if (bibo.getPMCID() != null) bib.setPMCID(bibo.getPMCID()); if (bibo.getPII() != null) bib.setPII(bibo.getPII()); if (bibo.getIstexId() != null) bib.setIstexId(bibo.getIstexId()); if (bibo.getArk() != null) bib.setArk(bibo.getArk()); if (bibo.getOAURL() != null) bib.setOAURL(bibo.getOAURL()); if (bibo.getJournal() != null) { bib.setJournal(bibo.getJournal()); /* document type consistency (correction might change overall item type, and some fields become unconsistent) */ if (bibo.getBookTitle() == null) { bib.setBookTitle(null); } } if (bibo.getAuthors() != null) bib.setAuthors(bibo.getAuthors()); if (bibo.getEditors() != null) bib.setEditors(bibo.getEditors()); if (bibo.getBookTitle() != null) { bib.setBookTitle(bibo.getBookTitle()); /* document type consistency */ if (bibo.getJournal() == null) { bib.setJournal(null); } } if (bibo.getVolume() != null) bib.setVolume(bibo.getVolume()); if (bibo.getVolumeBlock() != null) bib.setVolumeBlock(bibo.getVolumeBlock(), false); if (bibo.getIssue() != null) bib.setIssue(bibo.getIssue()); if (bibo.getBeginPage() != -1) bib.setBeginPage(bibo.getBeginPage()); if (bibo.getEndPage() != -1) bib.setEndPage(bibo.getEndPage()); if (bibo.getPageRange() != null) bib.setPageRange(bibo.getPageRange()); if (bibo.getPublicationDate() != null) bib.setPublicationDate(bibo.getPublicationDate()); if (bibo.getSubmissionDate() != null) bib.setSubmissionDate(bibo.getSubmissionDate()); if (bibo.getDownloadDate() != null) bib.setDownloadDate(bibo.getDownloadDate()); if (bibo.getNormalizedPublicationDate() != null) { if (bib.getNormalizedPublicationDate() != null) { bib.mergeNormalizedPublicationDate(bibo.getNormalizedPublicationDate()); } else { bib.setNormalizedPublicationDate(bibo.getNormalizedPublicationDate()); } } if (bibo.getYear() != null) bib.setYear(bibo.getYear()); if (bibo.getMonth() != null) bib.setMonth(bibo.getMonth()); if (bibo.getDay() != null) bib.setDay(bibo.getDay()); if (bibo.getE_Year() != null) bib.setE_Year(bibo.getE_Year()); if (bibo.getE_Month() != null) bib.setE_Month(bibo.getE_Month()); if (bibo.getE_Day() != null) bib.setE_Day(bibo.getE_Day()); if (bibo.getA_Year() != null) bib.setA_Year(bibo.getA_Year()); if (bibo.getA_Month() != null) bib.setA_Month(bibo.getA_Month()); if (bibo.getA_Day() != null) bib.setA_Day(bibo.getA_Day()); if (bibo.getS_Year() != null) bib.setS_Year(bibo.getS_Year()); if (bibo.getS_Month() != null) bib.setS_Month(bibo.getS_Month()); if (bibo.getS_Day() != null) bib.setS_Day(bibo.getS_Day()); if (bibo.getD_Year() != null) bib.setD_Year(bibo.getD_Year()); if (bibo.getD_Month() != null) bib.setD_Month(bibo.getD_Month()); if (bibo.getD_Day() != null) bib.setD_Day(bibo.getD_Day()); if (bibo.getLocation() != null) bib.setLocation(bibo.getLocation()); if (bibo.getPublisher() != null) bib.setPublisher(bibo.getPublisher()); if (bibo.getTitle() != null) { bib.setTitle(bibo.getTitle()); } if (bibo.getArticleTitle() != null) { bib.setArticleTitle(bibo.getArticleTitle()); } if (bibo.getJournalAbbrev() != null) { bib.setJournalAbbrev(bibo.getJournalAbbrev()); } if (bibo.getISSN() != null) bib.setISSN(bibo.getISSN()); if (bibo.getISSNe() != null) bib.setISSNe(bibo.getISSNe()); if (bibo.getISBN10() != null) bib.setISBN10(bibo.getISBN10()); if (bibo.getISBN13() != null) bib.setISBN13(bibo.getISBN13()); if (bibo.getItem() != -1) { bib.setItem(bibo.getItem()); } if (bibo.getCollaboration() != null) { bib.setCollaboration(bibo.getCollaboration()); } /* authors present in fullAuthors list should be in the existing resources, at least the corresponding author */ if (!CollectionUtils.isEmpty(bibo.getFullAuthors())) { if (CollectionUtils.isEmpty(bib.getFullAuthors())) bib.setFullAuthors(bibo.getFullAuthors()); else if (bibo.getFullAuthors().size() == 1) { /* we have the corresponding author */ /* check if the author exists in the obtained list */ Person auto = (Person) bibo.getFullAuthors().get(0); List<Person> auts = bib.getFullAuthors(); if (auts != null) { for (Person aut : auts) { if (StringUtils.isNotBlank(aut.getLastName()) && StringUtils.isNotBlank(auto.getLastName())) { if (aut.getLastName().toLowerCase().equals(auto.getLastName().toLowerCase())) { if (StringUtils.isBlank(aut.getFirstName()) || (auto.getFirstName() != null && aut.getFirstName().length() <= auto.getFirstName().length() && auto.getFirstName().toLowerCase().startsWith(aut.getFirstName().toLowerCase()))) { aut.setFirstName(auto.getFirstName()); aut.setCorresp(true); if (StringUtils.isNotBlank(auto.getEmail())) aut.setEmail(auto.getEmail()); // should we also check the country ? affiliation?
if (StringUtils.isNotBlank(auto.getMiddleName()) && (StringUtils.isBlank(aut.getMiddleName()))) aut.setMiddleName(auto.getMiddleName()); /* crossref is considered more reliable than PDF annotations */ aut.setORCID(auto.getORCID()); } } } } } } else if (bibo.getFullAuthors().size() > 1) { /* we have the complete list of authors so we can take them from the second biblio item and merge some possible extra from the first when a match is reliable */ for (Person aut : bibo.getFullAuthors()) { /* try to find the author in the first item (we know it's not empty) */ for (Person aut2 : bib.getFullAuthors()) { if (StringUtils.isNotBlank(aut2.getLastName())) { String aut2_lastname = aut2.getLastName().toLowerCase(); if (StringUtils.isNotBlank(aut.getLastName())) { String aut_lastname = aut.getLastName().toLowerCase(); if (aut_lastname.equals(aut2_lastname)) { /* check also first name if present - at least for the initial */ if ( StringUtils.isBlank(aut2.getFirstName()) || (StringUtils.isNotBlank(aut2.getFirstName()) && StringUtils.isNotBlank(aut.getFirstName())) ) { /* we have no first name or a match (full first name) */ if ( StringUtils.isBlank(aut2.getFirstName()) || aut.getFirstName().equals(aut2.getFirstName()) || ( aut.getFirstName().length() == 1 && aut.getFirstName().equals(aut2.getFirstName().substring(0,1)) ) ) { /* we have a match (full or initial) */ if (StringUtils.isNotBlank(aut2.getFirstName()) && aut2.getFirstName().length() > aut.getFirstName().length()) aut.setFirstName(aut2.getFirstName()); if (StringUtils.isBlank(aut.getMiddleName())) aut.setMiddleName(aut2.getMiddleName()); if (StringUtils.isBlank(aut.getTitle())) aut.setTitle(aut2.getTitle()); if (StringUtils.isBlank(aut.getSuffix())) aut.setSuffix(aut2.getSuffix()); if (StringUtils.isBlank(aut.getEmail())) aut.setEmail(aut2.getEmail()); if(!CollectionUtils.isEmpty(aut2.getAffiliations())) aut.setAffiliations(aut2.getAffiliations()); if (!CollectionUtils.isEmpty(aut2.getAffiliationBlocks()))
aut.setAffiliationBlocks(aut2.getAffiliationBlocks()); if (!CollectionUtils.isEmpty(aut2.getAffiliationMarkers())) aut.setAffiliationMarkers(aut2.getAffiliationMarkers()); if (!CollectionUtils.isEmpty(aut2.getMarkers())) aut.setMarkers(aut2.getMarkers()); if (!CollectionUtils.isEmpty(aut2.getLayoutTokens())) aut.setLayoutTokens(aut2.getLayoutTokens()); /* crossref is considered more reliable than PDF annotations, so ORCIDs are not overwritten */ break; } } } } } } } bib.setFullAuthors(bibo.getFullAuthors()); } } }
/** * Check is the biblio item can be considered as a minimally valid bibliographical reference. * A certain minimal number of core metadata have to be instanciated. Otherwise, the biblio * item can be considered as "garbage" extracted incorrectly. */
// Rejects when no title-like field, no author/collaboration, no URL and no DOI are set.
public boolean rejectAsReference() { boolean titleSet = true; if ( (title == null) && (bookTitle == null) && (journal == null) && (ISSN == null) && (ISBN13 == null) && (ISBN10 == null)) titleSet = false; boolean authorSet = true; if (fullAuthors == null && collaboration == null) authorSet = false; /* normally properties authors and authorList are null in the current Grobid version */ if (!titleSet && !authorSet && (url == null) && (doi == null)) return true; else return false; }
// --- simple accessors for TEI id, ordinal, coordinates and the label->tokens map ---
public String getTeiId() { return teiId; } public int getOrdinal() { return ordinal; } public void setOrdinal(int ordinal) { this.ordinal = ordinal; } public void setCoordinates(List<BoundingBox> coordinates) { this.coordinates = coordinates; } public List<BoundingBox> getCoordinates() { return coordinates; } public Map<String, List<LayoutToken>> getLabeledTokens() { return labeledTokens; } public void setLabeledTokens(Map<String, List<LayoutToken>> labeledTokens) { this.labeledTokens = labeledTokens; }
// Returns the layout tokens collected under the given header label, or null when the
// map or the label is missing.
public List<LayoutToken> getLayoutTokens(TaggingLabel headerLabel) { if (labeledTokens == null) { LOGGER.debug("labeledTokens is null"); return null; } if (headerLabel.getLabel() == null) { LOGGER.debug("headerLabel.getLabel() is null");
return null; } return labeledTokens.get(headerLabel.getLabel()); }
public void setLayoutTokensForLabel(List<LayoutToken> tokens, TaggingLabel headerLabel) { if (labeledTokens == null) labeledTokens = new TreeMap<>(); labeledTokens.put(headerLabel.getLabel(), tokens); }
// Re-clusters a labeled sequence (header model output) and accumulates the tokens of
// each cluster under its label in the labeledTokens map.
public void generalResultMapping(String labeledResult, List<LayoutToken> tokenizations) { if (labeledTokens == null) labeledTokens = new TreeMap<>(); TaggingTokenClusteror clusteror = new TaggingTokenClusteror(GrobidModels.HEADER, labeledResult, tokenizations); List<TaggingTokenCluster> clusters = clusteror.cluster(); for (TaggingTokenCluster cluster : clusters) { if (cluster == null) { continue; } TaggingLabel clusterLabel = cluster.getTaggingLabel(); List<LayoutToken> clusterTokens = cluster.concatTokens(); List<LayoutToken> theList = labeledTokens.get(clusterLabel.getLabel()); theList = theList == null ? new ArrayList<>() : theList; theList.addAll(clusterTokens); labeledTokens.put(clusterLabel.getLabel(), theList); } }
public List<LayoutToken> getAuthorsTokensWorkingCopy() { return authorsTokensWorkingCopy; } public List<LayoutToken> getAbstractTokensWorkingCopy() { return abstractTokensWorkingCopy; } public String getAvailabilityStmt() { return availabilityStmt; } public void setAvailabilityStmt(String availabilityStmt) { this.availabilityStmt = availabilityStmt; } }
161,418
36.20189
156
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/Passage.java
/**
 * Models the location of a citation passage inside a document: begin/end page
 * and line numbers, begin/end column labels, optional figure/table references,
 * and the raw passage string itself.
 *
 * All numeric positions use -1 as the "not set" marker; string fields use null.
 */
public class Passage {
    // -1 marks an unset position for all numeric fields
    private int pageBegin = -1;
    private int pageEnd = -1;
    private int lineBegin = -1;
    private int lineEnd = -1;

    private String colBegin = null;
    private String colEnd = null;
    private String figure = null;
    private String table = null;
    private String rawPassage = null;

    public int getPageBegin() {
        return pageBegin;
    }

    // setter added: only rawPassage had one before, leaving the other fields unwritable
    public void setPageBegin(int pageBegin) {
        this.pageBegin = pageBegin;
    }

    public int getPageEnd() {
        return pageEnd;
    }

    public void setPageEnd(int pageEnd) {
        this.pageEnd = pageEnd;
    }

    public int getLineBegin() {
        return lineBegin;
    }

    public void setLineBegin(int lineBegin) {
        this.lineBegin = lineBegin;
    }

    public int getLineEnd() {
        return lineEnd;
    }

    public void setLineEnd(int lineEnd) {
        this.lineEnd = lineEnd;
    }

    public String getColBegin() {
        return colBegin;
    }

    public void setColBegin(String colBegin) {
        this.colBegin = colBegin;
    }

    public String getColEnd() {
        return colEnd;
    }

    public void setColEnd(String colEnd) {
        this.colEnd = colEnd;
    }

    public String getFigure() {
        return figure;
    }

    public void setFigure(String figure) {
        this.figure = figure;
    }

    public String getTable() {
        return table;
    }

    public void setTable(String table) {
        this.table = table;
    }

    public String getRawPassage() {
        return rawPassage;
    }

    // original signature kept for source compatibility
    public void setRawPassage(String s) {
        rawPassage = s;
    }
}
1,058
16.360656
43
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/Keyword.java
package org.grobid.core.data;

import java.util.ArrayList;
import java.util.List;

import org.grobid.core.utilities.TextUtilities;

/**
 * Class for representing a keyword extracted from a publication.
 * A keyword has the term string itself plus an optional type/scheme label.
 */
public class Keyword {
    private String keyword = null; // the keyword string
    private String type = null;    // optional type/scheme of the keyword

    public Keyword(String key) {
        keyword = key;
    }

    public Keyword(String key, String typ) {
        keyword = key;
        type = typ;
    }

    public String getKeyword() {
        return keyword;
    }

    public void setKeyword(String key) {
        keyword = key;
    }

    public String getType() {
        return type;
    }

    public void setType(String typ) {
        type = typ;
    }

    /**
     * @return true when a keyword string is present
     */
    public boolean notNull() {
        // simplified from an if/else returning boolean literals
        return keyword != null;
    }

    @Override
    public String toString() {
        // same output as the original string-concatenation version,
        // including the inner double space when both fields are set
        StringBuilder res = new StringBuilder();
        if (keyword != null)
            res.append(keyword).append(" ");
        if (type != null) {
            res.append(" (type:").append(type).append(")");
        }
        return res.toString().trim();
    }

    /**
     * @return a TEI &lt;term&gt; element with the HTML-encoded keyword,
     *         or null when no keyword is set
     */
    public String toTEI() {
        if (keyword == null) {
            return null;
        }
        return "<term>" + TextUtilities.HTMLEncode(keyword) + "</term>";
    }
}
1,262
18.136364
78
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/table/Cell.java
package org.grobid.core.data.table;

import java.util.ArrayList;
import java.util.List;

/**
 * A single cell of a reconstructed table. A cell is a {@link Line} (a group of
 * line parts with a bounding box) enriched with its grid position, a colspan
 * count and a flag marking placeholder cells created for multi-column spans.
 */
public class Cell extends Line {

    private int positionRow = -1;    // row index in the grid, -1 until assigned
    private int positionColumn = -1; // column index in the grid, -1 until assigned
    private int colspan = 1;         // number of columns this cell spans
    private boolean merged = false;  // placeholder created by a multi-column span

    /**
     * Tells whether the given part horizontally overlaps this cell's current
     * [left, right] interval; a cell without content accepts any part.
     *
     * NOTE(review): Line.getContent() returns null for an empty line — in
     * practice this method is only called on cells that already hold content
     * (see Row.extractRows), so the isEmpty() call does not NPE there.
     */
    public boolean linePartInBorders(LinePart linePart) {
        if (this.getContent().isEmpty()) {
            return true;
        }
        // overlap iff the part is neither entirely left of nor entirely right of the cell
        return this.getLeft() <= linePart.getRight() && this.getRight() >= linePart.getLeft();
    }

    public int getColspan() {
        return this.colspan;
    }

    public void setColspan(int colspan) {
        this.colspan = colspan;
    }

    public int getPositionRow() {
        return positionRow;
    }

    public void setPositionRow(int positionRow) {
        this.positionRow = positionRow;
    }

    public int getPositionColumn() {
        return positionColumn;
    }

    public void setPositionColumn(int positionColumn) {
        this.positionColumn = positionColumn;
    }

    // these write the bounding-box fields inherited from LinePart
    public void setRight(double rightpos) {
        this.right = rightpos;
    }

    public void setLeft(double leftpos) {
        this.left = leftpos;
    }

    public void setMerged(boolean merged) {
        this.merged = merged;
    }

    public boolean isMerged() {
        return this.merged;
    }
}
1,159
18.016393
100
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/table/Row.java
package org.grobid.core.data.table;

import java.util.*;

/**
 * A table row: an ordered collection of {@link Cell}s plus the bounding box
 * (top/bottom/left/right, inherited from {@link LinePart}) that encloses them.
 * Also hosts the static table-structure algorithms that turn detected
 * {@link Line}s into a grid of rows and cells.
 */
public class Row extends LinePart {
    // cells of this row; extractRows() keeps them sorted left-to-right
    private List<Cell> cells = new ArrayList<>();

    /** Adds a cell and stretches this row's bounding box to include it. */
    public void add(Cell cell) {
        cells.add(cell);
        setTop(cell);
        setBottom(cell);
        setLeft(cell);
        setRight(cell);
    }

    // keep the smallest top coordinate (highest on the page)
    private void setTop(Cell cell) {
        double cellTop = cell.getTop();
        if (top == GROBID_TOKEN_DEFAULT_DOUBLE || top > cellTop) {
            top = cellTop;
        }
    }

    // keep the largest bottom coordinate
    private void setBottom(Cell cell) {
        double cellBottom = cell.getBottom();
        if (bottom == GROBID_TOKEN_DEFAULT_DOUBLE || bottom < cellBottom) {
            bottom = cellBottom;
        }
    }

    // keep the smallest left coordinate
    private void setLeft(Cell cell) {
        double cellLeft = cell.getLeft();
        if (left == GROBID_TOKEN_DEFAULT_DOUBLE || left > cellLeft) {
            left = cellLeft;
        }
    }

    // keep the largest right coordinate
    private void setRight(Cell cell) {
        double cellRight = cell.getRight();
        if (right == GROBID_TOKEN_DEFAULT_DOUBLE || right < cellRight) {
            right = cellRight;
        }
    }

    /** @return the live (mutable) list of cells of this row */
    public List<Cell> getContent() {
        return this.cells;
    }

    @Override
    public boolean isEmpty() {
        return this.getContent().size() == 0;
    }

    /**
     * Greedy clustering of each line's parts into cells: parts that
     * horizontally overlap the current cell are absorbed (restarting the scan,
     * since the cell's span may have grown); when nothing fits, a new cell is
     * started. NOTE(review): destructively consumes each Line's content list.
     *
     * @param lines Lines, detected by the algorithm, see Line::extractLines
     * @return rows containing cells; doesn't include empty cells
     */
    public static List<Row> extractRows(List<Line> lines) {
        List<Row> rows = new ArrayList<>();
        for (Line line: lines) {
            if (line.getText().isEmpty()) continue;
            List<LinePart> lineContent = line.getContent();
            Row row = new Row();
            Cell currentCell = null;
            // scan from the last part towards the first
            int i = lineContent.size() - 1;
            while (!lineContent.isEmpty() && i >= 0) {
                LinePart linePart = lineContent.get(i);
                if (currentCell == null) {
                    // seed a new cell with the current part
                    currentCell = new Cell();
                    row.add(currentCell);
                    currentCell.add(linePart);
                    lineContent.remove(i);
                    i--;
                    continue;
                }
                if (currentCell.linePartInBorders(linePart)) {
                    currentCell.add(linePart);
                    lineContent.remove(i);
                    i = lineContent.size() - 1; // return to the first item and recheck borders
                    continue;
                }
                if (i == 0) {
                    // nothing else fits this cell: start a fresh one on the next pass
                    currentCell = null;
                    i = lineContent.size() - 1;
                } else {
                    i--;
                }
            }
            // cells are produced in consumption order; order them left-to-right
            row.getContent().sort(Comparator.comparingDouble(LinePart::getLeft));
            rows.add(row);
        }
        return rows;
    }

    /**
     * @param rows extracted rows
     * @param columnCount the maximum number of columns in the table
     * Identifies and inserts empty cells into the table based on the left and
     * right margins of the content inside columns, assigns each cell its grid
     * position, and marks/accounts for cells spanning several columns.
     */
    public static void insertEmptyCells(List<Row> rows, int columnCount) {
        int columnNumber = 0;
        while (columnNumber < columnCount) {
            // pass 1: leftmost x of this column and of the next column, across all rows
            double currentLeftMost = Cell.GROBID_TOKEN_DEFAULT_DOUBLE;
            double nextColumnLeftMost = Cell.GROBID_TOKEN_DEFAULT_DOUBLE;
            for (Row row: rows) {
                List<Cell> cells = row.getContent();
                if (columnNumber > cells.size() - 1) continue;
                Cell cell = cells.get(columnNumber);
                if (currentLeftMost == Cell.GROBID_TOKEN_DEFAULT_DOUBLE || currentLeftMost > cell.getLeft()) {
                    currentLeftMost = cell.getLeft();
                }
                if ((columnNumber + 1) < cells.size()) {
                    Cell nextColumnCell = cells.get(columnNumber + 1);
                    if (nextColumnLeftMost == Cell.GROBID_TOKEN_DEFAULT_DOUBLE || nextColumnLeftMost > nextColumnCell.getLeft()) {
                        nextColumnLeftMost = nextColumnCell.getLeft();
                    }
                }
            }
            // pass 2: rightmost x of this column, only counting cells that end
            // before the next column starts (i.e. that do not span columns)
            double currentRightMost = Cell.GROBID_TOKEN_DEFAULT_DOUBLE;
            for (Row row: rows) {
                List<Cell> cells = row.getContent();
                if (columnNumber > cells.size() - 1) continue;
                Cell cell = cells.get(columnNumber);
                if (nextColumnLeftMost != Cell.GROBID_TOKEN_DEFAULT_DOUBLE) {
                    if (cell.getRight() < nextColumnLeftMost && (currentRightMost < cell.getRight())) {
                        currentRightMost = cell.getRight();
                    }
                }
            }
            // pass 3: position cells, padding short rows and splitting spans
            for (int i = 0; i < rows.size(); i++) {
                Row row = rows.get(i);
                List<Cell> cells = row.getContent();
                if (columnNumber > cells.size() - 1) {
                    // insert empty cell for premature ended rows
                    Cell newCell = new Cell();
                    newCell.setLeft(currentLeftMost);
                    newCell.setRight(currentRightMost);
                    newCell.setPositionRow(i);
                    newCell.setPositionColumn(columnNumber);
                    row.add(newCell);
                    continue;
                }
                Cell cell = cells.get(columnNumber);
                if (cell.getRight() <= currentRightMost || currentRightMost == Cell.GROBID_TOKEN_DEFAULT_DOUBLE) {
                    // cell fits inside the column band: just assign its position
                    cell.setPositionRow(i);
                    cell.setPositionColumn(columnNumber);
                } else if (cell.getLeft() > currentRightMost) {
                    // empty cell: content starts past this column's band
                    Cell newCell = new Cell();
                    newCell.setRight(cell.getRight());
                    newCell.setLeft(currentLeftMost);
                    newCell.setPositionRow(i);
                    newCell.setPositionColumn(columnNumber);
                    row.getContent().add(columnNumber, newCell);
                } else {
                    // cell overflows into the next column: add a merged placeholder there
                    Cell newCell = new Cell();
                    newCell.setRight(cell.getRight());
                    newCell.setLeft(nextColumnLeftMost);
                    newCell.setPositionRow(i);
                    newCell.setPositionColumn(columnNumber + 1);
                    newCell.setMerged(true);
                    row.getContent().add(columnNumber + 1, newCell);
                    // current cell spans on several columns
                    int z = columnNumber;
                    while (z >= 0) {
                        Cell colspanCell = cells.get(z);
                        // find the cell that spans on several rows, it's the first non-empty cell.
                        if (!colspanCell.isEmpty()) {
                            colspanCell.setColspan(colspanCell.getColspan()+1);
                            if (colspanCell.getPositionRow() == -1) {
                                colspanCell.setPositionRow(z);
                            }
                            if (colspanCell.getPositionColumn() == -1) {
                                colspanCell.setPositionColumn(columnNumber);
                            }
                            break;
                        }
                        z--;
                    }
                }
            }
            columnNumber++;
        }
    }

    /** @return the maximum number of cells found in any of the given rows */
    public static int columnCount(List<Row> rows) {
        int columnCount = 0;
        for (Row row: rows) {
            int cellNumber = row.getContent().size();
            if (cellNumber > columnCount) {
                columnCount = cellNumber;
            }
        }
        return columnCount;
    }

    /** Removes the placeholder cells flagged as merged by insertEmptyCells(). */
    public static void mergeMulticolumnCells(List<Row> rows) {
        for (Row row: rows) {
            List<Cell> cells = row.getContent();
            for (int i = cells.size() - 1; i >= 0; i--) {
                Cell cell = cells.get(i);
                if (cell.isMerged()) {
                    row.getContent().remove(i);
                }
            }
        }
    }
}
6,158
26.132159
120
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/table/LinePart.java
package org.grobid.core.data.table;

import org.grobid.core.layout.LayoutToken;

import java.util.ArrayList;
import java.util.List;

/**
 * Smallest building block of the table reconstruction: a run of layout tokens
 * together with the bounding box (top/bottom/left/right) that encloses them.
 * All coordinates use {@link #GROBID_TOKEN_DEFAULT_DOUBLE} (-1.0) as the
 * "unset" marker; tokens carrying that marker are ignored when growing the box.
 */
public class LinePart {
    public static final double GROBID_TOKEN_DEFAULT_DOUBLE = -1.0;

    private List<LayoutToken> contentTokens = new ArrayList<>();

    // bounding box; package-private so subclasses (Line, Row, Cell) can update them
    double top = GROBID_TOKEN_DEFAULT_DOUBLE;
    double bottom = GROBID_TOKEN_DEFAULT_DOUBLE;
    double left = GROBID_TOKEN_DEFAULT_DOUBLE;
    double right = GROBID_TOKEN_DEFAULT_DOUBLE;

    /** Appends a token and stretches the bounding box to include it. */
    public void add(LayoutToken contentToken) {
        contentTokens.add(contentToken);
        growTop(contentToken);
        growBottom(contentToken);
        growLeft(contentToken);
        growRight(contentToken);
    }

    // top edge: smallest known y; tokens without a y coordinate are skipped
    private void growTop(LayoutToken token) {
        double y = token.getY();
        if (y == GROBID_TOKEN_DEFAULT_DOUBLE) {
            return;
        }
        if (top == GROBID_TOKEN_DEFAULT_DOUBLE || y < top) {
            top = y;
        }
    }

    // bottom edge: largest y + height; needs both coordinates to be set
    private void growBottom(LayoutToken token) {
        double y = token.getY();
        double h = token.getHeight();
        if (y == GROBID_TOKEN_DEFAULT_DOUBLE || h == GROBID_TOKEN_DEFAULT_DOUBLE) {
            return;
        }
        double candidate = Double.sum(y, h);
        if (bottom == GROBID_TOKEN_DEFAULT_DOUBLE || candidate > bottom) {
            bottom = candidate;
        }
    }

    // left edge: smallest known x
    private void growLeft(LayoutToken token) {
        double x = token.getX();
        if (x == GROBID_TOKEN_DEFAULT_DOUBLE) {
            return;
        }
        if (left == GROBID_TOKEN_DEFAULT_DOUBLE || x < left) {
            left = x;
        }
    }

    // right edge: largest x + width; needs both coordinates to be set
    private void growRight(LayoutToken token) {
        double x = token.getX();
        double w = token.getWidth();
        if (x == GROBID_TOKEN_DEFAULT_DOUBLE || w == GROBID_TOKEN_DEFAULT_DOUBLE) {
            return;
        }
        double candidate = Double.sum(x, w);
        if (right == GROBID_TOKEN_DEFAULT_DOUBLE || candidate > right) {
            right = candidate;
        }
    }

    public double getTop() {
        return top;
    }

    public double getBottom() {
        return bottom;
    }

    public double getLeft() {
        return left;
    }

    public double getRight() {
        return right;
    }

    /** @return the concatenated text of all contained tokens */
    public String getText() {
        StringBuilder text = new StringBuilder();
        for (LayoutToken token : contentTokens) {
            text.append(token.getText());
        }
        return text.toString();
    }

    public boolean isEmpty() {
        return this.contentTokens.isEmpty();
    }
}
2,543
20.378151
98
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/table/Line.java
package org.grobid.core.data.table;

import org.grobid.core.layout.LayoutToken;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

/**
 * A text line of a table region: a group of {@link LinePart}s that vertically
 * overlap, together with the bounding box that encloses them. Also hosts the
 * static helpers segmenting layout tokens into parts and grouping parts into
 * lines.
 */
public class Line extends LinePart {
    private List<LinePart> contentParts = new ArrayList<>();

    /** Adds a part and stretches this line's bounding box to include it. */
    public void add(LinePart contentPart) {
        contentParts.add(contentPart);
        setTop(contentPart);
        setBottom(contentPart);
        setLeft(contentPart);
        setRight(contentPart);
    }

    // keep the smallest top coordinate (highest on the page)
    private void setTop(LinePart contentPart) {
        double partTop = contentPart.getTop();
        if (top == GROBID_TOKEN_DEFAULT_DOUBLE || top > partTop) {
            top = partTop;
        }
    }

    // keep the largest bottom coordinate
    private void setBottom(LinePart contentPart) {
        double partBottom = contentPart.getBottom();
        if (bottom == GROBID_TOKEN_DEFAULT_DOUBLE || bottom < partBottom) {
            bottom = partBottom;
        }
    }

    // keep the smallest left coordinate
    private void setLeft(LinePart contentPart) {
        double partLeft = contentPart.getLeft();
        if (left == GROBID_TOKEN_DEFAULT_DOUBLE || left > partLeft) {
            left = partLeft;
        }
    }

    // keep the largest right coordinate
    private void setRight(LinePart contentPart) {
        double partRight = contentPart.getRight();
        if (right == GROBID_TOKEN_DEFAULT_DOUBLE || right < partRight) {
            right = partRight;
        }
    }

    /**
     * @return the live list of parts, or null when the line is empty.
     * NOTE(review): the null return is relied upon nowhere visible here —
     * callers such as Row.extractRows only invoke this after filtering out
     * lines whose text is empty; confirm before changing the contract.
     */
    public List<LinePart> getContent() {
        if (!this.contentParts.isEmpty()) return this.contentParts;

        return null;
    }

    public boolean isEmpty() {
        return this.contentParts.size() == 0;
    }

    /**
     * Whether the given part vertically overlaps this line's current band;
     * an empty line accepts any part.
     */
    public boolean linePartInBorders(LinePart linePart) {
        if (this.contentParts.isEmpty()) return true;

        // token is fully above the line or below, it doesn't overlap
        if ((this.getTop() > linePart.getBottom()) || this.getBottom() < linePart.getTop()) return false;

        return true;
    }

    /** @return the concatenated text of all parts of this line */
    @Override
    public String getText() {
        StringBuilder stringBuilder = new StringBuilder();

        for (LinePart linePart: contentParts) {
            stringBuilder.append(linePart.getText());
        }

        return stringBuilder.toString();
    }

    /**
     * Splits a token stream into LineParts at newline tokens.
     * NOTE(review): a leading "\n" token, or a trailing one, yields an empty
     * LinePart; extractLines() later drops parts whose text is empty.
     */
    public static List<LinePart> extractLineParts(List<LayoutToken> contentTokens) {
        List<LinePart> lineParts = new ArrayList<>();
        LinePart currentLinePart = null;
        for (int i = 0; i < contentTokens.size(); i++) {
            LayoutToken contentToken = contentTokens.get(i);
            if (i == 0) {
                currentLinePart = new LinePart();
                lineParts.add(currentLinePart);
            }
            if (!contentToken.getText().equals("\n")) {
                currentLinePart.add(contentToken);
            }
            if (contentToken.getText().equals("\n")) {
                // newline terminates the current part and opens a new one
                LinePart newLinePart = new LinePart();
                lineParts.add(newLinePart);
                currentLinePart = newLinePart;
            }
        }
        return lineParts;
    }

    /*
     * Algorithm for extracting lines.
     * See algorithm 1: Burcu Yildiz, Katharina Kaiser, Silvia Miksch. pdf2table: A Method to Extract Table Information
     * from PDF Files.
     */
    public static List<Line> extractLines(List<LinePart> lineParts) {
        // Greedy clustering: scan lineParts from the end; a part that vertically
        // overlaps the current line is absorbed (and the scan restarts, since the
        // line's band may have grown); when no remaining part fits, a new line is
        // started. Consumed and empty parts are removed from the input list
        // (destructive on the argument).
        List<Line> lines = new ArrayList<>();
        Line currentLine = null;
        int i = lineParts.size() - 1;
        while (!lineParts.isEmpty() && i >= 0) {
            LinePart linePart = lineParts.get(i);
            if (linePart.getText().isEmpty()) {
                // discard empty parts produced by consecutive newlines
                lineParts.remove(i);
                i--;
                continue;
            }
            if (currentLine == null) {
                currentLine = new Line();
                lines.add(currentLine);
                currentLine.add(linePart);
                lineParts.remove(i);
                i--;
                continue;
            }
            if (currentLine.linePartInBorders(linePart)){
                currentLine.add(linePart);
                lineParts.remove(i);
                i = lineParts.size() - 1; // return to the first item and recheck borders
                continue;
            }
            if (i == 0) {
                // nothing else fits the current line: start a new one next pass
                currentLine = null;
                i = lineParts.size() - 1;
            } else {
                i--;
            }
        }
        lines.sort(Comparator.comparingDouble(Line::getTop)); // sorting by top position
        return lines;
    }
}
3,635
23.24
116
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/util/AuthorEmailAssigner.java
package org.grobid.core.data.util;

import org.grobid.core.data.Person;

import java.util.List;

/**
 * Strategy interface for matching extracted e-mail addresses to authors.
 */
public interface AuthorEmailAssigner {

    /**
     * Embeds emails into authors (mutating the given Person objects).
     * The email list should be sanitized before being passed in
     * (e.g. via EmailSanitizer).
     *
     * @param authors authors to receive e-mail addresses
     * @param emails  cleaned candidate e-mail addresses
     */
    public void assign(List<Person> authors, List<String> emails);
}
278
22.25
66
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/util/ClassicAuthorEmailAssigner.java
package org.grobid.core.data.util;

import org.grobid.core.data.Person;
import org.grobid.core.utilities.TextUtilities;

import java.util.ArrayList;
import java.util.List;

/**
 * Heuristic e-mail-to-author matcher: with exactly one author and one address
 * they are paired directly; otherwise each address's local part (before '@')
 * is compared via Levenshtein distance against e-mail-style variants generated
 * from every author's first/last name, and the closest author wins, provided
 * the distance is small enough. Each author receives at most one address.
 */
public class ClassicAuthorEmailAssigner implements AuthorEmailAssigner {

    @Override
    public void assign(List<Person> fullAuthors, List<String> emails) {
        // indices of authors that already received an address
        List<Integer> winners = new ArrayList<Integer>();

        // if 1 email and 1 author, not too hard...
        if (fullAuthors != null) {
            if ((emails.size() == 1) && (fullAuthors.size() == 1)) {
                fullAuthors.get(0).setEmail(emails.get(0));
            } else {
                // we associate emails to the authors based on string proximity
                for (String mail : emails) {
                    int maxDist = 1000; // smallest distance seen so far
                    int best = -1;      // index of the closest author, -1 when none found
                    int ind = mail.indexOf("@");
                    if (ind != -1) {
                        // local part of the address, e.g. "jdoe" in "jdoe@host"
                        String nam = mail.substring(0, ind).toLowerCase();
                        int k = 0;
                        for (Person aut : fullAuthors) {
                            Integer kk = k;
                            if (!winners.contains(kk)) {
                                List<String> emailVariants = TextUtilities.generateEmailVariants(aut.getFirstName(), aut.getLastName());

                                for (String variant : emailVariants) {
                                    variant = variant.toLowerCase();
                                    int dist = TextUtilities.getLevenshteinDistance(nam, variant);
                                    if (dist < maxDist) {
                                        best = k;
                                        maxDist = dist;
                                    }
                                }
                            }
                            k++;
                        }

                        // make sure that the best candidate found is not too far:
                        // accept only when the edit distance is under half the
                        // length of the address's local part
                        if (best != -1 && maxDist < nam.length() / 2) {
                            Person winner = fullAuthors.get(best);
                            winner.setEmail(mail);
                            winners.add(best);
                        }
                    }
                }
            }
        }
    }
}
2,303
38.724138
136
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/data/util/EmailSanitizer.java
package org.grobid.core.data.util;

import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Cleans raw e-mail strings extracted from documents: splits multi-address
 * strings, repairs encoding artifacts, strips labels ("e-mail:", names, ...),
 * restores '@' from common obfuscations, de-duplicates, and drops obviously
 * non-personal addresses (editorial/support mailboxes).
 */
public class EmailSanitizer {
    // U+2010 hyphen, percent-encoded; repaired to a plain ASCII '-' before URL-decoding
    private static final Pattern DASHES_PATTERN = Pattern.compile("(%E2%80%90|%e2%80%90)");

    // substrings marking non-personal mailboxes; any match discards the address
    private static final Set<String> BLACKLISTED_EMAIL_WORDS = Sets.newHashSet(
            "firstname", "lastname", "publication",
            "theses", "thesis",
            "editor", "press", "contact", "info", "feedback", "journal", "please", "pubs", "iza@iza", "admin",
            "help", "subs", "news", "archives", "order", "postmaster@", "informa", "reprint", "comunicacion@",
            "revista", "digitalcommons", "group@", "root@", "deposit@", "studies", "permiss", "print", "paper", "report",
            "support", "pedocs", "investigaciones@", "medicin", "copyright", "rights",
            "sales@", "pacific@", "redaktion", "publicidad", "surface@", "comstat@", "service@", "omnia@",
            "letter", "scholar", "staff", "delivery", "epubs", "office", "technolog", "compute", "elsevier"
    );

    // leading labels, all whitespace, and trailing "(...)" fragments to strip
    private static final Pattern[] EMAIL_STRIP_PATTERNS = new Pattern[] {
            Pattern.compile("^(e\\-mail|email|e\\smail|mail):"),
            Pattern.compile("[\\r\\n\\t ]"), // newlines, tabs and spaces
            Pattern.compile("\\(.*\\)$"),
    };

    // common '@' obfuscations ("&#64;", "@.", ".@") normalized back to '@'
    private static final Pattern[] AT_SYMBOL_REPLACEMENTS = new Pattern[] {
            Pattern.compile("&#64;"),
            Pattern.compile("@\\."),
            Pattern.compile("\\.@"),
    };

    // separators between multiple addresses in one string
    private static final Pattern EMAIL_SPLITTER_PATTERN = Pattern.compile("(\\sor\\s|,|;|/)");
    private static final Pattern AT_SPLITTER = Pattern.compile("@");

    /**
     * Splits, cleans, validates and de-duplicates raw e-mail strings.
     *
     * @param addresses email addresses
     * @return cleaned addresses, or null when the input is null or nothing
     *         survives cleaning (callers rely on the null contract, so it is kept)
     */
    public List<String> splitAndClean(List<String> addresses) {
        if (addresses == null) {
            return null;
        }
        List<String> result = new ArrayList<String>();
        Set<String> emails = new HashSet<String>();

        for (String emailAddress : addresses) {
            emailAddress = initialReplace(emailAddress);

            List<String> splitEmails = Lists.newArrayList(Splitter.on(EMAIL_SPLITTER_PATTERN)
                    .omitEmptyStrings()
                    .split(emailAddress.toLowerCase()).iterator());

            if (splitEmails.size() > 1) {
                // Some strings are of the form "a,b@host" or "a;b@host" or "a/b/c@host":
                // only the last fragment carries the domain
                List<String> atSeparatedStrings = Lists.newArrayList(Splitter.on(AT_SPLITTER)
                        .omitEmptyStrings()
                        .split(emailAddress.toLowerCase()).iterator());

                if (atSeparatedStrings.size() == 2) {
                    // Only the last email address has a domain, so append it to the rest of the splitted emails
                    int atIndex = splitEmails.get(splitEmails.size() - 1).indexOf('@');
                    String domain = splitEmails.get(splitEmails.size() - 1).substring(atIndex + 1);

                    for (int i = 0; i < splitEmails.size() - 1; i++) {
                        splitEmails.set(i, splitEmails.get(i) + "@" + domain);
                    }
                }
            }

            for (String splitEmail : splitEmails) {
                String email;
                try {
                    email = cleanEmail(splitEmail);
                } catch (Exception e) {
                    // Cleaning failed so it's probably an invalid email; skip it
                    // (deliberate best-effort: one bad candidate must not abort the batch)
                    continue;
                }
                if (email != null && !email.isEmpty()) {
                    // Check for duplicate emails
                    if (emails.contains(email)) {
                        continue;
                    }
                    email = postValidateAddress(email);
                    if (email == null) {
                        continue;
                    }
                    emails.add(email);
                    result.add(email);
                }
            }
        }

        if (result.isEmpty()) {
            return null;
        }
        return result;
    }

    // strips braces/parentheses and leading "electronic address"/"e-mail"/name labels
    private String initialReplace(String email) {
        email = email.replace("{", "");
        email = email.replace("}", "");
        email = email.replace("(", "");
        email = email.replace(")", "").trim();
        email = email.replaceAll("(E|e)lectronic(\\s)(A|a)ddress(\\:)?", "");
        email = email.replaceAll("^(e|E)?(\\-)?mail(\\:)?(\\s)(A|a)ddress(\\:)?", "");
        email = email.replaceAll("^(e|E)?(\\-)?mail(\\:)?(\\s)?", "");
        // case: a leading "Firstname Lastname - " prefix before the address
        email = email.replaceAll("^[A-Z][a-z]+\\s+[A-Z][a-z]+(\\*)?(\\s)*-(\\s)*", "");
        return email;
    }

    // rejects blacklisted (non-personal) addresses and anything without '@';
    // returns the stripped address or null when rejected
    private static String postValidateAddress(String emStr) {
        String orig = emStr;
        for (String b : BLACKLISTED_EMAIL_WORDS) {
            if (orig.contains(b)) {
                return null;
            }
        }

        for (Pattern p : EMAIL_STRIP_PATTERNS) {
            Matcher matcher = p.matcher(orig);
            orig = matcher.replaceAll("");
        }

        if (!orig.contains("@")) {
            return null;
        }
        return orig;
    }

    // repairs dashes, URL-decodes, lower-cases, strips labels and de-obfuscates '@'
    private static String cleanEmail(String email) throws UnsupportedEncodingException {
        if (email == null) {
            return null;
        }

        // Fix any incorrect dashes
        Matcher dashes = DASHES_PATTERN.matcher(email);
        email = dashes.replaceAll("-");

        // Some emails may contain HTML encoded characters, so decode just in case
        email = URLDecoder.decode(email, "UTF-8");
        email = email.toLowerCase().trim();

        for (Pattern p : EMAIL_STRIP_PATTERNS) {
            Matcher matcher = p.matcher(email);
            email = matcher.replaceAll("");
        }

        for (Pattern r : AT_SYMBOL_REPLACEMENTS) {
            Matcher matcher = r.matcher(email);
            email = matcher.replaceAll("@");
        }

        return email;
    }
}
8,356
30.417293
136
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/transformation/xslt/JATSTransformer.java
package org.grobid.core.transformation.xslt;

import org.xml.sax.SAXException;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.stream.XMLStreamException;
import javax.xml.transform.*;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

/**
 * Applies the bundled Grobid JATS XSLT stylesheet (via Saxon) to XML input
 * and offers a helper to strip a fixed set of namespace declarations from
 * the resulting document.
 */
public class JATSTransformer {

    public final static String xsltPath = "/xslt/grobid-jats.xsl";

    // Shared Transformer built once from the bundled stylesheet.
    // NOTE(review): javax.xml.transform.Transformer instances are not
    // thread-safe per the JAXP API; concurrent calls to transform(String)
    // share this instance — confirm callers serialize access, or hold a
    // Templates and call newTransformer() per invocation instead.
    private static Transformer t;

    static {
        net.sf.saxon.TransformerFactoryImpl tf = new net.sf.saxon.TransformerFactoryImpl();
        tf.setURIResolver(new URIResolver() {
            @Override
            public Source resolve(String href, String base) throws TransformerException {
                // Resolve xsl:include/xsl:import targets from the classpath as a
                // stream: the previous getResource(...).getPath() approach broke
                // when the stylesheets are packaged inside a jar and threw an NPE
                // when the resource was missing.
                InputStream included = JATSTransformer.class.getResourceAsStream("/xslt/" + href);
                if (included == null) {
                    throw new TransformerException("Cannot resolve XSLT resource: /xslt/" + href);
                }
                return new StreamSource(included);
            }
        });

        InputStream is = JATSTransformer.class.getResourceAsStream(xsltPath);
        try {
            t = tf.newTransformer(new StreamSource(is));
        } catch (TransformerConfigurationException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Transforms the given XML string with the shared JATS stylesheet.
     *
     * @param input XML document as a string
     * @return the transformation result as a string
     */
    public String transform(String input) throws TransformerException, ParserConfigurationException, SAXException, XMLStreamException {
        return transform(input, t);
    }

    // performs the actual transformation with the supplied Transformer
    protected String transform(String input, Transformer t) throws TransformerException, ParserConfigurationException, SAXException, XMLStreamException {
        StringWriter w = new StringWriter();
        Result r = new StreamResult(w);
        // StandardCharsets.UTF_8 instead of Charset.forName("UTF-8"): no lookup,
        // no possible UnsupportedCharsetException
        StreamSource xmlSource = new StreamSource(new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8)));
        t.transform(xmlSource, r);
        return w.toString();
    }

    /** Removes pdm/xsl/ext/exch namespace declarations from the given string. */
    public String stripNamespaces(String input) {
        return input.replaceAll("xmlns:(pdm|xsl|ext|exch)=\"[^\"]+\"", "");
    }
}
1,964
34.089286
153
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/main/GrobidHomeFinder.java
package org.grobid.core.main;

import com.google.common.collect.Lists;
import com.google.common.hash.Hashing;
import org.apache.commons.io.FileUtils;
import org.grobid.core.exceptions.GrobidException;
import org.grobid.core.exceptions.GrobidExceptionStatus;
import org.grobid.core.exceptions.GrobidPropertyException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.JarURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

/**
 * This class is responsible for finding a right grobid home.
 *
 * Resolution order: the "org.grobid.home" system property (plain path, file://
 * URL, or http(s):// URL pointing to a zip), then a list of conventional
 * relative folder names, then a grobid-home zip bundled on the classpath
 * (extracted and cached under the system temp folder).
 */
public class GrobidHomeFinder {
    private static final String PROP_GROBID_HOME = "org.grobid.home";
    private static final String PROP_GROBID_CONFIG = "org.grobid.config";
    private static final Logger LOGGER = LoggerFactory.getLogger(GrobidHomeFinder.class);
    private static final List<String> GROBID_FOLDER_POSSIBLE_LOCATIONS = Lists.newArrayList("../grobid-home", "grobid-home", "GROBID_HOME");
    private static final int BUFFER_SIZE = 4096;

    // candidate relative locations probed when no system property is set
    private final List<String> grobidHomePossibleLocations;

    public GrobidHomeFinder() {
        this(GROBID_FOLDER_POSSIBLE_LOCATIONS);
    }

    public GrobidHomeFinder(List<String> grobidHomePossibleLocations) {
        if (grobidHomePossibleLocations == null) {
            this.grobidHomePossibleLocations = Collections.emptyList();
        } else {
            this.grobidHomePossibleLocations = grobidHomePossibleLocations;
        }
    }

    /**
     * Locates the grobid-home folder following the resolution order described
     * on the class.
     *
     * @return the existing grobid-home directory
     * @throws GrobidPropertyException when no usable folder can be found
     */
    public File findGrobidHomeOrFail() {
        File gh = getGrobidHomePathOrLoadFromClasspath();

        LOGGER.info("***************************************************************");
        LOGGER.info("*** USING GROBID HOME: " + gh.getAbsolutePath());
        LOGGER.info("***************************************************************");
        if (!gh.exists() || !gh.isDirectory()) {
            fail("Grobid home folder '" + gh.getAbsolutePath() + "' was detected for usage, but does not exist");
        }
        return gh;
    }

    /**
     * Locates the grobid config file: from the "org.grobid.config" system
     * property when set, otherwise defaulting to {grobidHome}/config/grobid.yaml.
     *
     * @throws GrobidPropertyException when the home folder or config file is unusable
     */
    public File findGrobidConfigOrFail(File grobidHome) {
        if (grobidHome == null || !grobidHome.exists() || !grobidHome.isDirectory()) {
            fail("Grobid home folder '" + grobidHome + "' was detected for usage, but does not exist or null");
        }

        String grobidConfig = System.getProperty(PROP_GROBID_CONFIG);
        File grobidConfigFile;
        if (grobidConfig == null) {
            grobidConfigFile = new File(grobidHome, "config/grobid.yaml").getAbsoluteFile();
            LOGGER.info("Grobid config file location was not explicitly set via '" + PROP_GROBID_CONFIG + "' system variable, defaulting to: " + grobidConfigFile);
        } else {
            grobidConfigFile = new File(grobidConfig).getAbsoluteFile();
        }

        if (!grobidConfigFile.exists() || grobidConfigFile.isDirectory()) {
            fail("Grobid property file '" + grobidConfigFile + "' does not exist or a directory");
        }
        return grobidConfigFile;
    }

    private static void fail(String msg, Throwable e) {
        throw new GrobidPropertyException(msg, e);
    }

    private static void fail(String msg) {
        throw new GrobidPropertyException(msg);
    }

    // Implements the resolution order described in the class javadoc; always
    // either returns a candidate File or throws via fail().
    private File getGrobidHomePathOrLoadFromClasspath() {
        String grobidHomeProperty = System.getProperty(PROP_GROBID_HOME);
        if (grobidHomeProperty != null) {
            try {
                URL url = new URL(grobidHomeProperty);
                if (url.getProtocol().equals("file")) {
                    return new File(grobidHomeProperty);
                } else if (url.getProtocol().equals("http") || url.getProtocol().equals("https")) {
                    // to do, download and cache
                    try {
                        return unzipToTempFile(url, false);
                    } catch (IOException e) {
                        fail("Cannot fetch Grobid home from: " + url, e);
                    }
                }
            } catch (MalformedURLException e) {
                // just normal path, return it
                return new File(grobidHomeProperty);
            }
        } else {
            LOGGER.info("No Grobid property was provided. Attempting to find Grobid home in the current directory...");
            for (String possibleName : grobidHomePossibleLocations) {
                File gh = new File(possibleName);
                if (gh.exists()) {
                    return gh.getAbsoluteFile();
                }
            }

            LOGGER.info("Attempting to find and in the classpath...");
            // TODO: inject a descriptive file into Grobid home
            // probe for a well-known lexicon file to detect a bundled grobid-home
            URL url = GrobidHomeFinder.class.getResource("/grobid-home/lexicon/names/firstname.5k");
            if (url == null) {
                fail("No Grobid home was found in classpath and no Grobid home location was not provided");
            }

            if (url.getProtocol().equals("jar")) {
                final JarURLConnection connection;
                try {
                    connection = (JarURLConnection) url.openConnection();
                    final URL zipUrl = connection.getJarFileURL();
                    return unzipToTempFile(zipUrl, false);
                } catch (IOException e) {
                    fail("Cannot load a Grobid home from classpath", e);
                }
            } else {
                fail("Unsupported protocol for Grobid home at location: " + url);
            }
        }
        fail("Cannot locate Grobid home: add it to classpath or explicitly provide a system property: '-D" + PROP_GROBID_HOME + "'");
        // not reachable code since exception is thrown
        return null;
    }

    // Extracts the zip at zipUrl into a per-user, per-URL cache folder under
    // java.io.tmpdir; an existing cache is reused unless forceReload is set.
    private static File unzipToTempFile(URL zipUrl, boolean forceReload) throws IOException {
        // md5 is used here only as a cache-key for the folder name, not for security
        String hash = Hashing.md5().hashString(zipUrl.toString(), Charset.defaultCharset()).toString();
        File tempRootDir = new File(System.getProperty("java.io.tmpdir"));
        File grobidHome = new File(tempRootDir, "grobid-home-" + System.getProperty("user.name") + "-" + hash);
        LOGGER.info("Extracting and caching Grobid home to " + grobidHome);

        if (grobidHome.exists()) {
            if (forceReload) {
                FileUtils.deleteDirectory(grobidHome);
            } else {
                LOGGER.warn("Grobid home already cached under: " + grobidHome + "; delete it if you want a new copy");
                return new File(grobidHome, "grobid-home");
            }
        }

        if (!grobidHome.mkdir()) {
            fail("Cannot create folder for Grobid home: " + grobidHome);
        }
        unzip(zipUrl.openStream(), grobidHome);
        return new File(grobidHome, "grobid-home");
    }

    // NOTE(review): the returned list is never populated — callers only use the
    // extraction side effect. Also, entry names are used unchecked in
    // new File(destinationDir, entry.getName()): an archive containing "../"
    // entries could write outside destinationDir (zip-slip) — verify that the
    // archives are trusted, or add a normalize()/startsWith(destinationDir) check.
    private static List<Path> unzip(InputStream is, File destinationDir) throws IOException {
        List<Path> list = new ArrayList<>();
        ZipInputStream zipIn = new ZipInputStream(is);
        ZipEntry entry = zipIn.getNextEntry();
        while (entry != null) {
            File filePath = new File(destinationDir, entry.getName());
            try {
                if (!entry.isDirectory()) {
                    String absolutePath = filePath.getAbsolutePath();
                    extractFile(zipIn, absolutePath);
                } else {
                    //noinspection ResultOfMethodCallIgnored
                    filePath.mkdir();
                }
            } finally {
                // advance to the next entry even if extraction failed
                zipIn.closeEntry();
                entry = zipIn.getNextEntry();
            }
        }
        zipIn.close();
        return list;
    }

    // Streams the current zip entry into filePath, creating parent folders as needed.
    private static void extractFile(ZipInputStream zipIn, String filePath) throws IOException {
        File file = new File(filePath);
        if (!file.getParentFile().exists()) {
            if (!file.getParentFile().mkdirs()) {
                throw new IOException("Cannot create parent folders: " + file.getParentFile());
            }
        }
        BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath));
        byte[] bytesIn = new byte[BUFFER_SIZE];
        int read;
        while ((read = zipIn.read(bytesIn)) != -1) {
            bos.write(bytesIn, 0, read);
        }
        bos.close();
    }

    // ad-hoc manual check: resolves grobid-home and prints the timing
    public static void main(String[] args) throws IOException {
        long t = System.currentTimeMillis();
        File grobidHomePathOrLoadFromClasspath = new GrobidHomeFinder().findGrobidHomeOrFail();
        System.out.println(grobidHomePathOrLoadFromClasspath);
        System.out.println("Took: " + (System.currentTimeMillis() - t));
    }
}
38.910714
140
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/main/LibraryLoader.java
package org.grobid.core.main; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.SystemUtils; import org.grobid.core.engines.tagging.GrobidCRFEngine; import org.grobid.core.exceptions.GrobidException; import org.grobid.core.jni.PythonEnvironmentConfig; import org.grobid.core.utilities.GrobidProperties; import org.grobid.core.utilities.Utilities; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FilenameFilter; import java.lang.reflect.Field; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; import java.util.Set; import static org.apache.commons.lang3.ArrayUtils.isEmpty; public class LibraryLoader { private static Logger LOGGER = LoggerFactory.getLogger(LibraryLoader.class); public static final String CRFPP_NATIVE_LIB_NAME = "libcrfpp"; public static final String WAPITI_NATIVE_LIB_NAME = "libwapiti"; public static final String DELFT_NATIVE_LIB_NAME_LINUX = "libjep"; public static final String DELFT_NATIVE_LIB_NAME = "jep"; private static boolean loaded = false; public static void load() { if (!loaded) { LOGGER.info("Loading external native sequence labelling library"); LOGGER.debug(getLibraryFolder()); Set<GrobidCRFEngine> distinctModels = GrobidProperties.getInstance().getDistinctModels(); for(GrobidCRFEngine distinctModel : distinctModels) { if (distinctModel != GrobidCRFEngine.CRFPP && distinctModel != GrobidCRFEngine.WAPITI && distinctModel != GrobidCRFEngine.DELFT) { throw new IllegalStateException("Unsupported sequence labelling engine: " + distinctModel); } } File libraryFolder = new File(getLibraryFolder()); if (!libraryFolder.exists() || !libraryFolder.isDirectory()) { LOGGER.error("Unable to find a native sequence labelling library: Folder " + libraryFolder + " does not exist"); throw new RuntimeException( "Unable to find a native sequence labelling library: Folder " + 
libraryFolder + " does not exist"); } if (CollectionUtils.containsAny(distinctModels, Collections.singletonList(GrobidCRFEngine.CRFPP))) { File[] files = libraryFolder.listFiles(file -> file.getName().toLowerCase().startsWith(CRFPP_NATIVE_LIB_NAME)); if (ArrayUtils.isEmpty(files)) { LOGGER.error("Unable to find a native CRF++ library: No files starting with " + CRFPP_NATIVE_LIB_NAME + " are in folder " + libraryFolder); throw new RuntimeException( "Unable to find a native CRF++ library: No files starting with " + CRFPP_NATIVE_LIB_NAME + " are in folder " + libraryFolder); } if (files.length > 1) { LOGGER.error("Unable to load a native CRF++ library: More than 1 library exists in " + libraryFolder); throw new RuntimeException( "Unable to load a native CRF++ library: More than 1 library exists in " + libraryFolder); } String libPath = files[0].getAbsolutePath(); // finally loading a library try { System.load(libPath); } catch (Exception e) { LOGGER.error("Unable to load a native CRF++ library, although it was found under path " + libPath); throw new RuntimeException( "Unable to load a native CRF++ library, although it was found under path " + libPath, e); } } if (CollectionUtils.containsAny(distinctModels, Collections.singletonList(GrobidCRFEngine.WAPITI))) { File[] wapitiLibFiles = libraryFolder.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.startsWith(WAPITI_NATIVE_LIB_NAME); } }); if (isEmpty(wapitiLibFiles)) { LOGGER.info("No wapiti library in the Grobid home folder"); } else { LOGGER.info("Loading Wapiti native library..."); if (CollectionUtils.containsAny(distinctModels, Collections.singletonList(GrobidCRFEngine.DELFT))) { // if DeLFT will be used, we must not load libstdc++, it would create a conflict with tensorflow libstdc++ version // so we temporary rename the lib so that it is not loaded in this case // note that we know that, in this case, the local lib can be ignored because as DeFLT and tensorflow 
are installed // we are sure that a compatible libstdc++ lib is installed on the system and can be dynamically loaded String libstdcppPath = libraryFolder.getAbsolutePath() + File.separator + "libstdc++.so.6"; File libstdcppFile = new File(libstdcppPath); if (libstdcppFile.exists()) { File libstdcppFileNew = new File(libstdcppPath + ".new"); libstdcppFile.renameTo(libstdcppFileNew); } String libgccPath = libraryFolder.getAbsolutePath() + File.separator + "libgcc_s.so.1"; File libgccFile = new File(libgccPath); if (libgccFile.exists()) { File libgccFileNew = new File(libgccPath + ".new"); libgccFile.renameTo(libgccFileNew); } } try { System.load(wapitiLibFiles[0].getAbsolutePath()); } finally { if (CollectionUtils.containsAny(distinctModels, Arrays.asList(GrobidCRFEngine.DELFT))) { // restore libstdc++ String libstdcppPathNew = libraryFolder.getAbsolutePath() + File.separator + "libstdc++.so.6.new"; File libstdcppFileNew = new File(libstdcppPathNew); if (libstdcppFileNew.exists()) { File libstdcppFile = new File(libraryFolder.getAbsolutePath() + File.separator + "libstdc++.so.6"); libstdcppFileNew.renameTo(libstdcppFile); } // restore libgcc String libgccPathNew = libraryFolder.getAbsolutePath() + File.separator + "libgcc_s.so.1.new"; File libgccFileNew = new File(libgccPathNew); if (libgccFileNew.exists()) { File libgccFile = new File(libraryFolder.getAbsolutePath() + File.separator + "libgcc_s.so.1"); libgccFileNew.renameTo(libgccFile); } } } } } if (CollectionUtils.containsAny(distinctModels, Collections.singletonList(GrobidCRFEngine.DELFT))) { LOGGER.info("Loading JEP native library for DeLFT... 
" + libraryFolder.getAbsolutePath()); // actual loading will be made at JEP initialization, so we just need to add the path in the // java.library.path (JEP will anyway try to load from java.library.path, so explicit file // loading here will not help) try { PythonEnvironmentConfig pythonEnvironmentConfig = PythonEnvironmentConfig.getInstance(); if (pythonEnvironmentConfig.isEmpty()) { LOGGER.info("No python environment configured"); } else { if (SystemUtils.IS_OS_MAC) { System.loadLibrary("python" + pythonEnvironmentConfig.getPythonVersion()); System.loadLibrary(DELFT_NATIVE_LIB_NAME); } else if (SystemUtils.IS_OS_LINUX) { System.loadLibrary(DELFT_NATIVE_LIB_NAME); } else if (SystemUtils.IS_OS_WINDOWS) { throw new UnsupportedOperationException("Delft on Windows is not supported."); } } } catch (Exception e) { throw new GrobidException("Loading JEP native library for DeLFT failed", e); } } loaded = true; LOGGER.info("Native library for sequence labelling loaded"); } } @Deprecated public static void addLibraryPath(String pathToAdd) throws Exception { Field usrPathsField = ClassLoader.class.getDeclaredField("usr_paths"); usrPathsField.setAccessible(true); String[] paths = (String[]) usrPathsField.get(null); for (String path : paths) if (path.equals(pathToAdd)) return; String[] newPaths = new String[paths.length + 1]; System.arraycopy(paths, 0, newPaths, 1, paths.length); newPaths[0] = pathToAdd; usrPathsField.set(null, newPaths); } public static String getLibraryFolder() { GrobidProperties.getInstance(); return String.format("%s" + File.separator + "%s", GrobidProperties.getNativeLibraryPath().getAbsolutePath(), Utilities.getOsNameAndArch()); } }
9,981
48.91
139
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/main/batch/GrobidMain.java
package org.grobid.core.main.batch;

import java.io.File;
import java.util.Arrays;
import java.util.List;

import org.grobid.core.engines.ProcessEngine;
import org.grobid.core.main.GrobidHomeFinder;
import org.grobid.core.main.LibraryLoader;
import org.grobid.core.utilities.GrobidProperties;
import org.grobid.core.utilities.Utilities;

/**
 * The entrance point for starting grobid from command line and perform batch processing
 */
public class GrobidMain {

    /** Names of the {@link ProcessEngine} methods that may be passed to {@code -exe}. */
    private static List<String> availableCommands;

    /**
     * Arguments of the batch.
     */
    private static GrobidMainArgs gbdArgs;

    /**
     * Build the path to grobid.properties from the path to grobid-home.
     *
     * @param pPath2GbdHome
     *            The path to Grobid home.
     * @return the path to grobid.properties.
     */
    protected final static String getPath2GbdProperties(final String pPath2GbdHome) {
        return pPath2GbdHome + File.separator + "config" + File.separator + "grobid.properties";
    }

    /**
     * Infer some parameters not given in arguments: defaults grobid-home to
     * ./grobid-home and the properties file to ./grobid.properties.
     */
    protected static void inferParamsNotSet() {
        String tmpFilePath;
        if (gbdArgs.getPath2grobidHome() == null) {
            tmpFilePath = new File("grobid-home").getAbsolutePath();
            System.out.println("No path set for grobid-home. Using: " + tmpFilePath);
            gbdArgs.setPath2grobidHome(tmpFilePath);
            gbdArgs.setPath2grobidProperty(new File("grobid.properties").getAbsolutePath());
        }
    }

    /** Initializes Grobid properties using the default home resolution. */
    protected static void initProcess() {
        GrobidProperties.getInstance();
    }

    /**
     * Initializes Grobid properties from an explicit grobid-home path.
     * Failures are reported on stderr but deliberately not rethrown.
     *
     * @param grobidHome path to the grobid-home directory
     */
    protected static void initProcess(String grobidHome) {
        try {
            final GrobidHomeFinder grobidHomeFinder = new GrobidHomeFinder(Arrays.asList(grobidHome));
            grobidHomeFinder.findGrobidHomeOrFail();
            GrobidProperties.getInstance(grobidHomeFinder);
        } catch (final Exception exp) {
            System.err.println("Grobid initialisation failed: " + exp);
        }
    }

    /**
     * @return String to display for help.
     */
    protected static String getHelp() {
        final StringBuilder help = new StringBuilder();
        help.append("\nHELP for GROBID batch\n\n");
        help.append("Command line arguments:\n");
        help.append(" -h:\n \tdisplays help\n");
        help.append(" -gH:\n \tgives the path to grobid home directory.\n");
        help.append(" -dIn:\n \tgives the path to the directory where the files to be processed are located, to be used only when the called method process files.\n");
        help.append(" -dOut:\n \tgives the path to the directory where the result files will be saved. The default output directory is the current directory.\n");
        help.append(" -s:\n \tgives a string as input to be processed, to be used only when the called method process a string.\n");
        help.append(" -r:\n \trecursive directory processing, default processing is not recursive.\n");
        help.append(" -ignoreAssets:\n \tdo not extract and save the PDF assets (bitmaps, vector graphics), by default the assets are extracted and saved.\n");
        help.append(" -teiCoordinates:\n \toutput a subset of the identified structures with coordinates in the original PDF, by default no coordinates are present.\n");
        help.append(" -addElementId:\n \tadd xml:id attribute automatically to the XML elements in the resulting TEI XML, by default no xml:id are added.\n");
        help.append(" -segmentSentences:\n \tadd sentence segmentation level structures for paragraphs in the TEI XML result, by default no sentence segmentation is present.\n");
        help.append(" -exe:\n \tgives the command to execute. The value should be one of these:\n");
        help.append("\t" + availableCommands + "\n");
        return help.toString();
    }

    /**
     * Returns the value following the flag at position {@code i}, or {@code null}
     * when the flag is the last argument. Guards against the out-of-bounds read
     * the previous implementation performed for trailing value-flags.
     */
    private static String valueAfter(final String[] pArgs, final int i) {
        return (i + 1 < pArgs.length) ? pArgs[i + 1] : null;
    }

    /**
     * Process batch given the args.
     *
     * @param pArgs
     *            The arguments given to the batch.
     * @return true if the batch should proceed, false when help was shown or the args are invalid
     */
    protected static boolean processArgs(final String[] pArgs) {
        if (pArgs.length == 0) {
            System.out.println(getHelp());
            return false;
        }
        for (int i = 0; i < pArgs.length; i++) {
            final String currArg = pArgs[i];
            if (currArg.equals("-h")) {
                System.out.println(getHelp());
                return false;
            }
            if (currArg.equals("-gH")) {
                final String value = valueAfter(pArgs, i);
                gbdArgs.setPath2grobidHome(value);
                if (value != null) {
                    gbdArgs.setPath2grobidProperty(getPath2GbdProperties(value));
                }
                i++;
            } else if (currArg.equals("-dIn")) {
                final String value = valueAfter(pArgs, i);
                if (value != null) {
                    gbdArgs.setPath2Input(value);
                    gbdArgs.setPdf(true);
                }
                i++;
            } else if (currArg.equals("-s")) {
                final String value = valueAfter(pArgs, i);
                if (value != null) {
                    gbdArgs.setInput(value);
                    gbdArgs.setPdf(false);
                }
                i++;
            } else if (currArg.equals("-dOut")) {
                final String value = valueAfter(pArgs, i);
                if (value != null) {
                    gbdArgs.setPath2Output(value);
                }
                i++;
            } else if (currArg.equals("-exe")) {
                final String command = valueAfter(pArgs, i);
                if (availableCommands.contains(command)) {
                    gbdArgs.setProcessMethodName(command);
                    i++;
                } else {
                    System.err.println("-exe value should be one value from this list: " + availableCommands);
                    return false;
                }
            } else if (currArg.equals("-ignoreAssets")) {
                gbdArgs.setSaveAssets(false);
            } else if (currArg.equals("-addElementId")) {
                gbdArgs.setAddElementId(true);
            } else if (currArg.equals("-teiCoordinates")) {
                gbdArgs.setTeiCoordinates(true);
            } else if (currArg.equals("-segmentSentences")) {
                gbdArgs.setSegmentSentences(true);
            } else if (currArg.equals("-r")) {
                gbdArgs.setRecursive(true);
            }
            // Unknown flags are silently ignored, matching the historical behavior.
        }
        return true;
    }

    /**
     * Starts Grobid from command line using the following parameters:
     *
     * @param args
     *            The arguments
     */
    public static void main(final String[] args) throws Exception {
        gbdArgs = new GrobidMainArgs();
        availableCommands = ProcessEngine.getUsableMethods();

        if (processArgs(args)) {
            inferParamsNotSet();
            if (gbdArgs.getPath2grobidHome() != null) {
                initProcess(gbdArgs.getPath2grobidHome());
            } else {
                initProcess();
            }
            ProcessEngine processEngine = new ProcessEngine();
            Utilities.launchMethod(processEngine, new Object[]{gbdArgs}, gbdArgs.getProcessMethodName());
            processEngine.close();
        }
    }
}
6,408
30.885572
173
java
grobid
grobid-master/grobid-core/src/main/java/org/grobid/core/main/batch/GrobidMainArgs.java
package org.grobid.core.main.batch; /** * Class containing args of the batch {@link GrobidMain}. * */ public class GrobidMainArgs { private String path2grobidHome; private String path2grobidProperty; private String path2Input; private String path2Output; private String processMethodName; private String input; private boolean isPdf; private boolean recursive = false; private boolean saveAssets = true; private boolean teiCoordinates = false; private boolean consolidateHeader = true; private boolean consolidateCitation = false; private boolean segmentSentences = false; private boolean addElementId = false; /** * @return the path2grobidHome */ public final String getPath2grobidHome() { return path2grobidHome; } /** * @param pPath2grobidHome * the path2grobidHome to set */ public final void setPath2grobidHome(final String pPath2grobidHome) { path2grobidHome = pPath2grobidHome; } /** * @return the path2grobidProperty */ public final String getPath2grobidProperty() { return path2grobidProperty; } /** * @param pPath2grobidProperty * the path2grobidProperty to set */ public final void setPath2grobidProperty(final String pPath2grobidProperty) { path2grobidProperty = pPath2grobidProperty; } /** * @return the path2input */ public final String getPath2Input() { return path2Input; } /** * @param pPath2input * the path2input to set */ public final void setPath2Input(final String pPath2input) { path2Input = pPath2input; } /** * @return the path2Output */ public final String getPath2Output() { return path2Output; } /** * @param pPath2Output * the path2Output to set */ public final void setPath2Output(final String pPath2Output) { path2Output = pPath2Output; } /** * @return the processMethodName */ public final String getProcessMethodName() { return processMethodName; } /** * @param pProcessMethodName * the processMethodName to set */ public final void setProcessMethodName(final String pProcessMethodName) { processMethodName = pProcessMethodName; } /** * @return the input */ public 
final String getInput() { return input; } /** * @param pInput * the input to set */ public final void setInput(final String pInput) { input = pInput; } /** * @return the isPdf */ public final boolean isPdf() { return isPdf; } /** * @param pIsPdf * the isPdf to set */ public final void setPdf(final boolean pIsPdf) { isPdf = pIsPdf; } /** * @return true if recursive file processing */ public final boolean isRecursive() { return recursive; } /** * @return true if consolidation of header metadata should be done */ public final boolean isConsolidateHeader() { return consolidateHeader; } /** * @return true if consolidation of citation metadata should be done */ public final boolean isConsolidateCitation() { return consolidateCitation; } /** * @return true if consolidation of header metadata should be done */ public final boolean getConsolidateHeader() { return consolidateHeader; } /** * @return true if consolidation of citation metadata should be done */ public final boolean getConsolidateCitation() { return consolidateCitation; } /** * @return true if the PDF assets (bitmaps, vector graphics) should be also extracted and saved */ public final boolean getSaveAssets() { return saveAssets; } /** * @param pSaveAssets true if the PDF assets (bitmaps, vector graphics) should be also extracted and saved */ public final void setSaveAssets(boolean pSaveAssets) { saveAssets = pSaveAssets; } /** * @param pRecursive * recursive file processing parameter to set */ public final void setRecursive(final boolean pRecursive) { recursive = pRecursive; } /** * @return true if output a subset of the identified structures with coordinates in the original PDF */ public final boolean getTeiCoordinates() { return teiCoordinates; } /** * @param pTeiCoordinates * output a subset of the identified structures with coordinates in the original PDF */ public final void setTeiCoordinates(final boolean pTeiCoordinates) { teiCoordinates = pTeiCoordinates; } /** * @return true if output a subset of the 
xml:id attributes must be added automatically to the resulting TEI XML elements */ public final boolean getAddElementId() { return addElementId; } /** * @param pAddElementId * add xml:id attribute automatically on elements in the resulting TEI XML */ public final void setAddElementId(final boolean pAddElementId) { addElementId = pAddElementId; } /** * @return true if we add sentence segmentation level structures for paragraphs in the TEI XML result */ public final boolean getSegmentSentences() { return segmentSentences; } /** * @param pSegmentSentences * add sentence segmentation level structures for paragraphs in the TEI XML result */ public final void setSegmentSentences(final boolean pSegmentSentences) { segmentSentences = pSegmentSentences; } }
5,217
20.297959
122
java
null
tesp-main/examples/capabilities/loadshed/helicshed.java
// Copyright (C) 2021-2022 Battelle Memorial Institute // file: helicsshed.java import com.java.helics.helics; import com.java.helics.*; public class helicshed { public static void main(String[] args) { System.loadLibrary ("helicsJava"); System.out.println ("HELICS Version: " + helics.helicsGetVersion()); SWIGTYPE_p_void fi = helics.helicsCreateFederateInfo(); helics.helicsFederateInfoSetCoreTypeFromString (fi, "zmq"); helics.helicsFederateInfoSetCoreInitString (fi, "--federates=1"); helics.helicsFederateInfoSetTimeProperty (fi, HelicsProperties.HELICS_PROPERTY_TIME_DELTA.swigValue(), 1.0); SWIGTYPE_p_void fed = helics.helicsCreateCombinationFederate ("shedfed", fi); SWIGTYPE_p_void pubid = helics.helicsFederateRegisterGlobalPublication (fed, "loadshed/sw_status", HelicsDataTypes.HELICS_DATA_TYPE_STRING, ""); helics.helicsFederateEnterInitializingMode (fed); helics.helicsFederateEnterExecutingMode (fed); int [][] switchings = {{0,1},{1800,0},{5400,1},{16200,0},{19800,1}}; int hours = 6; int seconds = 60 * 60 * hours; double grantedtime = -1; for (int i = 0; i < switchings.length; i++) { int t = switchings[i][0]; String val = Integer.toString (switchings[i][1]); System.out.println ("Requesting " + Integer.toString (t)); while (grantedtime < t) { grantedtime = helics.helicsFederateRequestTime(fed, t); } System.out.println ("Sending " + val); helics.helicsPublicationPublishString (pubid, val); } helics.helicsFederateFinalize (fed); helics.helicsCloseLibrary (); } }
1,635
35.355556
113
java
null
tesp-main/examples/capabilities/loadshedf/loadshed.java
// Copyright (C) 2021-2022 Battelle Memorial Institute // file: helicsshed.py import fncs.JNIfncs; public class loadshed { public static void main(String[] args) { long time_granted=0; long time_stop=Long.parseLong(args[0]); JNIfncs.initialize(); assert JNIfncs.is_initialized(); while (time_granted < time_stop) { time_granted = JNIfncs.time_request(time_stop); String[] events = JNIfncs.get_events(); for (int i=0; i<events.length; ++i) { String value = JNIfncs.get_value(events[i]); String[] values = JNIfncs.get_values(events[i]); assert value == values[0]; if (events[i].equals("sw_status")) { JNIfncs.publish("sw_status", values[0]); System.out.printf("** publishing sw_status=%s\n", values[0]); } for (int j=0; j<values.length; ++j) { System.out.printf("\t%d\t[%d] %s\t[%d] %s\n", time_granted, i, events[i], j, values[j]); } } } JNIfncs.end(); assert !JNIfncs.is_initialized(); } }
1,187
32.942857
108
java
null
tesp-main/scripts/build/test_helics.java
// Copyright (C) 2021-2022 Battelle Memorial Institute // file: test_helics.java import com.java.helics.helics; public class test_helics { public static void main(String[] args) { System.loadLibrary ("helicsJava"); System.out.println ("HELICS Java, " + helics.helicsGetVersion()); } }
305
24.5
70
java
iri
iri-master/src/main/java/com/iota/iri/BundleValidator.java
package com.iota.iri;

import com.iota.iri.controllers.TransactionViewModel;
import com.iota.iri.crypto.*;
import com.iota.iri.model.Hash;
import com.iota.iri.service.snapshot.Snapshot;
import com.iota.iri.storage.Tangle;
import com.iota.iri.utils.Converter;

import java.util.*;

import com.google.common.annotations.VisibleForTesting;

/**
 * Validates bundles.
 * <p>
 * Bundles are lists of transactions that represent an atomic transfer, meaning that either all transactions inside the
 * bundle will be accepted by the network, or none. All transactions in a bundle have the same bundle hash and are
 * chained together via their trunks.
 * </p>
 */
public class BundleValidator {

    /**
     * Used to signal the validity of the input transaction.
     */
    public enum Validity {
        /**
         * Says that a validation could not have been executed because parts of the bundle or other data is missing.
         */
        UNKNOWN,
        /**
         * Says that the transaction and its bundle is valid.
         */
        VALID,
        /**
         * Says that the transaction and its bundle is invalid.
         */
        INVALID;
    }

    /**
     * Instructs the validation code to validate the signatures of the bundle.
     */
    public static final int MODE_VALIDATE_SIGNATURES = 1;

    /**
     * Instructs the validation code to compare the computed bundle hash from the essence data against the tail
     * transaction's bundle hash.
     */
    public static final int MODE_VALIDATE_BUNDLE_HASH = 1 << 1;

    /**
     * Instructs the validation code to check the integrity of the bundle by checking current/last index positions,
     * making sure that every transaction of the the bundle is present, value transaction addresses end with a 0 trit,
     * the bundle's aggregation of value doesn't exceed the max supply of tokens and the sum of the bundle equals 0.
     */
    public static final int MODE_VALIDATE_SEMANTICS = 1 << 2;

    /**
     * Instructs the validation code to validate all transactions within the bundle approve via their branch the trunk
     * transaction of the head transaction
     */
    public static final int MODE_VALIDATE_BUNDLE_TX_APPROVAL = 1 << 3;

    /**
     * Instructs the validation code to validate that the bundle only approves tail txs.
     */
    public static final int MODE_VALIDATE_TAIL_APPROVAL = 1 << 4;

    /**
     * Instructs the validation code to fully validate the semantics, bundle hash and signatures of the given bundle.
     */
    public static final int MODE_VALIDATE_ALL = MODE_VALIDATE_SIGNATURES | MODE_VALIDATE_BUNDLE_HASH
            | MODE_VALIDATE_SEMANTICS | MODE_VALIDATE_TAIL_APPROVAL | MODE_VALIDATE_BUNDLE_TX_APPROVAL;

    /**
     * Instructs the validation code to skip checking the bundle's already computed validity and instead to proceed to
     * validate the bundle further.
     */
    public static final int MODE_SKIP_CACHED_VALIDITY = 1 << 5;

    /**
     * Instructs the validation code to skip checking whether the tail transaction is present or a tail transaction was
     * given as the start transaction.
     */
    public static final int MODE_SKIP_TAIL_TX_EXISTENCE = 1 << 6;

    /**
     * Fetches a bundle of transactions identified by the {@code tailHash} and validates the transactions. Bundle is a
     * group of transactions with the same bundle hash chained by their trunks.
     * <p>
     * The fetched transactions have the same bundle hash as the transaction identified by {@code tailHash} The
     * validation does the following semantic checks:
     * </p>
     * <ol>
     * <li>The absolute bundle value never exceeds the total, global supply of iotas</li>
     * <li>The last trit when we convert from binary is 0</li>
     * <li>Total bundle value is 0 (inputs and outputs are balanced)</li>
     * <li>Recalculate the bundle hash by absorbing and squeezing the transactions' essence</li>
     * <li>Validate the signature on input transactions</li>
     * <li>The bundle must only approve tail transactions</li>
     * <li>All transactions within the bundle approve via their branch the trunk transaction of the head transaction.</li>
     * </ol>
     * <p>
     * As well as the following syntactic checks:
     * <ol>
     * <li>{@code tailHash} has an index of 0</li>
     * <li>The transactions' reference order is consistent with the indexes</li>
     * <li>The last index of each transaction in the bundle matches the last index of the tail transaction</li>
     * <li>Check that last trit in a valid address hash is 0. We generate addresses using binary Kerl and
     * we lose the last trit in the process</li>
     * </ol>
     *
     * @param tangle used to fetch the bundle's transactions from the persistence layer
     * @param enforceExtraRules true if enforce {@link #validateBundleTailApproval(Tangle, List)} and
     *                          {@link #validateBundleTransactionsApproval(List)}
     * @param initialSnapshot the initial snapshot that defines the genesis for our ledger state
     * @param tailHash the hash of the last transaction in a bundle.
     * @return A list of transactions of the bundle contained in another list. If the bundle is valid then the tail
     *         transaction's {@link TransactionViewModel#getValidity()} will return 1, else {@link
     *         TransactionViewModel#getValidity()} will return -1. If the bundle is invalid then an empty list will be
     *         returned.
     * @throws Exception if a persistence error occurred
     * @implNote if {@code tailHash} was already invalidated/validated by a previous call to this method then we don't
     * validate it again.
     * </p>
     */
    public List<TransactionViewModel> validate(Tangle tangle, boolean enforceExtraRules, Snapshot initialSnapshot,
            Hash tailHash) throws Exception {
        int mode = getMode(enforceExtraRules);
        return validate(tangle, initialSnapshot, tailHash, mode);
    }

    /**
     * Does {@link #validate(Tangle, boolean, Snapshot, Hash)} but with an option of skipping some checks according to
     * the give {@code mode}
     *
     * @param tangle used to fetch the bundle's transactions from the persistence layer
     * @param initialSnapshot the initial snapshot that defines the genesis for our ledger state
     * @param tailHash the hash of the last transaction in a bundle.
     * @param mode flags that specify which validation checks to perform
     * @return A list of transactions of the bundle contained in another list. If the bundle is valid then the tail
     *         transaction's {@link TransactionViewModel#getValidity()} will return 1, else
     *         {@link TransactionViewModel#getValidity()} will return -1. If the bundle is invalid then an empty list
     *         will be returned.
     * @throws Exception if a persistence error occurred
     * @implNote if {@code tailHash} was already invalidated/validated by a previous call to this method then we don't
     * validate it again.
     * @see #validate(Tangle, boolean, Snapshot, Hash)
     */
    private List<TransactionViewModel> validate(Tangle tangle, Snapshot initialSnapshot, Hash tailHash, int mode)
            throws Exception {
        List<TransactionViewModel> bundleTxs = new LinkedList<>();
        switch (validate(tangle, tailHash, mode, bundleTxs)) {
            case VALID:
                // Persist the (positive) validity flag only if it changed.
                if (bundleTxs.get(0).getValidity() != 1) {
                    bundleTxs.get(0).setValidity(tangle, initialSnapshot, 1);
                }
                return bundleTxs;
            case INVALID:
                if (!bundleTxs.isEmpty() && bundleTxs.get(0).getValidity() != -1) {
                    bundleTxs.get(0).setValidity(tangle, initialSnapshot, -1);
                }
                // intentional fall-through: INVALID returns the same empty list as UNKNOWN
            case UNKNOWN:
            default:
                return Collections.EMPTY_LIST;
        }
    }

    // Maps the boolean "enforce extra rules" flag to the corresponding bitmask of validation modes.
    private static int getMode(boolean enforceExtraRules) {
        if (enforceExtraRules) {
            return MODE_VALIDATE_ALL;
        }
        return MODE_VALIDATE_SIGNATURES | MODE_VALIDATE_BUNDLE_HASH | MODE_VALIDATE_SEMANTICS;
    }

    // True when every bit of 'has' is set in 'mode'.
    private static boolean hasMode(int mode, int has) {
        return (mode & has) == has;
    }

    /**
     * <p>
     * Loads the rest of the bundle of the given start transaction and then validates the bundle given the mode of
     * validation.
     * </p>
     * <p>
     * Note that this method does not update the validity flag of the given transaction in the database.
     * </p>
     *
     * @param tangle used to fetch the bundle's transactions from the persistence layer
     * @param startTxHash the hash of the entrypoint transaction, must be the tail transaction if {@link
     *                    BundleValidator#MODE_SKIP_TAIL_TX_EXISTENCE} is not used
     * @param validationMode the validation mode defining the level of validation done with the loaded up bundle
     * @param bundleTxs a list which gets filled with the transactions of the bundle
     * @return whether the validation criteria were passed or not
     * @throws Exception if an error occurred in the persistence layer
     */
    @VisibleForTesting
    Validity validate(Tangle tangle, Hash startTxHash, int validationMode, List<TransactionViewModel> bundleTxs)
            throws Exception {
        TransactionViewModel startTx = TransactionViewModel.fromHash(tangle, startTxHash);
        if (startTx == null || (!hasMode(validationMode, MODE_SKIP_TAIL_TX_EXISTENCE)
                && (startTx.getCurrentIndex() != 0 || startTx.getValidity() == -1))) {
            return Validity.INVALID;
        }

        // load up the bundle by going through the trunks (note that we might not load up the entire bundle in case we
        // were instructed to not check whether we actually got the tail transaction)
        Map<Hash, TransactionViewModel> bundleTxsMapping = loadTransactionsFromTangle(tangle, startTx,
                !hasMode(validationMode, MODE_VALIDATE_SEMANTICS));

        // check the semantics of the bundle: total sum, semantics per tx (current/last index), missing txs, supply
        Validity bundleSemanticsValidity = validateBundleSemantics(startTx, bundleTxsMapping, bundleTxs,
                validationMode);
        if (hasMode(validationMode, MODE_VALIDATE_SEMANTICS) && bundleSemanticsValidity != Validity.VALID) {
            return bundleSemanticsValidity;
        }

        // return if the bundle's validity was computed before
        if (!hasMode(validationMode, MODE_SKIP_CACHED_VALIDITY) && startTx.getValidity() == 1) {
            return Validity.VALID;
        }

        // compute the normalized bundle hash used to verify the signatures
        final byte[] normalizedBundle = new byte[Curl.HASH_LENGTH / ISS.TRYTE_WIDTH];
        Validity bundleHashValidity = validateBundleHash(bundleTxs, normalizedBundle, validationMode);
        if (hasMode(validationMode, MODE_VALIDATE_BUNDLE_HASH) && bundleHashValidity != Validity.VALID) {
            return bundleHashValidity;
        }

        //verify that the bundle only approves tail txs
        if (hasMode(validationMode, MODE_VALIDATE_TAIL_APPROVAL)) {
            Validity bundleTailApprovalValidity = validateBundleTailApproval(tangle, bundleTxs);
            if (bundleTailApprovalValidity != Validity.VALID) {
                return bundleTailApprovalValidity;
            }
        }

        //verify all transactions within the bundle approve via their branch the trunk transaction of the head transaction
        if (hasMode(validationMode, MODE_VALIDATE_BUNDLE_TX_APPROVAL)) {
            Validity bundleTransactionsApprovalValidity = validateBundleTransactionsApproval(bundleTxs);
            if (bundleTransactionsApprovalValidity != Validity.VALID) {
                return bundleTransactionsApprovalValidity;
            }
        }

        // verify the signatures of input transactions
        if (hasMode(validationMode, MODE_VALIDATE_SIGNATURES)) {
            return validateSignatures(bundleTxs, normalizedBundle);
        }

        return Validity.VALID;
    }

    /**
     * <p>
     * Checks the bundle's semantic validity by checking current/last index positions, making sure that every
     * transaction of the the bundle is present, value transaction addresses end with a 0 trit, the bundle's aggregation
     * value doesn't exceed the max supply of tokens and the sum of the bundle equals 0.
     * </p>
     *
     * @param startTx the start transaction from which to built the bundle up from
     * @param bundleTxsMapping a mapping of the transaction hashes to the actual transactions
     * @param bundleTxs an empty list which gets filled with the transactions in order of trunk ordering
     * @return whether the bundle is semantically valid
     */
    public Validity validateBundleSemantics(TransactionViewModel startTx,
            Map<Hash, TransactionViewModel> bundleTxsMapping, List<TransactionViewModel> bundleTxs) {
        return validateBundleSemantics(startTx, bundleTxsMapping, bundleTxs, MODE_VALIDATE_SEMANTICS);
    }

    /**
     * <p>
     * Checks the bundle's semantic validity by checking current/last index positions, making sure that every
     * transaction of the the bundle is present, value transaction addresses end with a 0 trit, the bundle's aggregation
     * of value doesn't exceed the max supply of tokens and the sum of the bundle equals 0.
     * </p>
     *
     * <p>
     * Note that if the validation mode does not include {@link BundleValidator#MODE_VALIDATE_SEMANTICS}, this method
     * will basically just compute an ordered list of the transactions and will not do any actual validation of any
     * kind.
     * </p>
     *
     * @param startTx the start transaction from which to built the bundle up from
     * @param bundleTxsMapping a mapping of the transaction hashes to the actual transactions
     * @param bundleTxs an empty list which gets filled with the transactions in order of trunk ordering
     * @param validationMode the used validation mode
     * @return whether the bundle is semantically valid
     */
    private Validity validateBundleSemantics(TransactionViewModel startTx,
            Map<Hash, TransactionViewModel> bundleTxsMapping, List<TransactionViewModel> bundleTxs,
            int validationMode) {
        TransactionViewModel tvm = startTx;
        final long lastIndex = tvm.lastIndex();
        long bundleValue = 0;

        if (!hasMode(validationMode, MODE_VALIDATE_SEMANTICS)) {
            // no validation requested: only collect the transactions in trunk order
            while (tvm != null) {
                bundleTxs.add(tvm);
                tvm = bundleTxsMapping.get(tvm.getTrunkTransactionHash());
            }
            return Validity.VALID;
        }

        // iterate over all transactions of the bundle and do some basic semantic checks
        for (int i = 0; i <= lastIndex; tvm = bundleTxsMapping.get(tvm.getTrunkTransactionHash()), i++) {
            if (tvm == null) {
                // we miss a transaction, abort
                return Validity.UNKNOWN;
            }

            bundleTxs.add(tvm);

            // semantic checks: index positions and supply bounds (Math.addExact traps overflow)
            if (tvm.getCurrentIndex() != i || tvm.lastIndex() != lastIndex
                    || ((bundleValue = Math.addExact(bundleValue, tvm.value())) < -TransactionViewModel.SUPPLY
                            || bundleValue > TransactionViewModel.SUPPLY)) {
                return Validity.INVALID;
            }

            // we lose the last trit by converting from bytes
            if (tvm.value() != 0 && tvm.getAddressHash().trits()[Curl.HASH_LENGTH - 1] != 0) {
                return Validity.INVALID;
            }
        }

        // total bundle value sum must be 0
        return bundleValue == 0 ? Validity.VALID : Validity.INVALID;
    }

    /**
     * <p>
     * Computes the normalized bundle hash of the given bundle transactions using the essence data and writes it into
     * the given normalizedBundleHash byte array.
     * </p>
     *
     * @param bundleTxs a list of ordered (by index) bundle transactions
     * @param destNormalizedBundleHash an array in which the normalized bundle hash is written into
     * @return whether the bundle hash of every transaction in the bundle corresponds to the computed bundle hash
     */
    public static Validity validateBundleHash(List<TransactionViewModel> bundleTxs,
            byte[] destNormalizedBundleHash) {
        return validateBundleHash(bundleTxs, destNormalizedBundleHash, MODE_VALIDATE_BUNDLE_HASH);
    }

    /**
     * <p>
     * Computes the normalized bundle hash of the given bundle transactions using the essence data and writes it into
     * the given normalizedBundleHash byte array.
     * </p>
     * <p>
     * Note that if the validation mode does not include {@link BundleValidator#MODE_VALIDATE_BUNDLE_HASH}, this method
     * will compute the bundle hash and write it into the normalizedBundleHash parameter, even if it is not valid.
     * </p>
     *
     * @param bundleTxs a list of ordered (by index) bundle transactions
     * @param destNormalizedBundleHash an array in which the normalized bundle hash is written into
     * @param validationMode the used validation mode
     * @return whether the bundle hash of every transaction in the bundle corresponds to the computed bundle hash
     */
    private static Validity validateBundleHash(List<TransactionViewModel> bundleTxs, byte[] destNormalizedBundleHash,
            int validationMode) {
        final Sponge curlInstance = SpongeFactory.create(SpongeFactory.Mode.KERL);
        final byte[] bundleHashTrits = new byte[TransactionViewModel.BUNDLE_TRINARY_SIZE];

        // compute actual bundle hash
        for (final TransactionViewModel tvm2 : bundleTxs) {
            curlInstance.absorb(tvm2.trits(), TransactionViewModel.ESSENCE_TRINARY_OFFSET,
                    TransactionViewModel.ESSENCE_TRINARY_SIZE);
        }
        curlInstance.squeeze(bundleHashTrits, 0, bundleHashTrits.length);

        // compare the computed bundle hash against each transaction's bundle hash
        if (hasMode(validationMode, MODE_VALIDATE_BUNDLE_HASH)) {
            for (TransactionViewModel tvm : bundleTxs) {
                if (!Arrays.equals(tvm.getBundleHash().trits(), bundleHashTrits)) {
                    return Validity.INVALID;
                }
            }
        }

        // normalizing the bundle in preparation for signature verification
        ISSInPlace.normalizedBundle(bundleHashTrits, destNormalizedBundleHash);
        return Validity.VALID;
    }

    /**
     * Validates the signatures of the given bundle transactions. The transactions must be ordered by index.
     *
     * @param bundleTxs a list of ordered (by index) bundle transactions
     * @param normalizedBundle the normalized bundle hash
     * @return whether all signatures were valid given the bundle hash and addresses
     */
    public static Validity validateSignatures(List<TransactionViewModel> bundleTxs, byte[] normalizedBundle) {
        final Sponge addressInstance = SpongeFactory.create(SpongeFactory.Mode.KERL);
        final byte[] addressTrits = new byte[TransactionViewModel.ADDRESS_TRINARY_SIZE];
        final byte[] digestTrits = new byte[Curl.HASH_LENGTH];
        TransactionViewModel tvm;

        for (int j = 0; j < bundleTxs.size(); ) {
            // iterate until next input transaction (a negative value marks an input whose
            // signature has to be verified; non-input transactions are simply skipped)
            tvm = bundleTxs.get(j);
            if (tvm.value() >= 0) {
                j++;
                continue;
            }

            // verify the signature of the input address by computing the address
            addressInstance.reset();
            int offset = 0, offsetNext = 0;
            do {
                offsetNext = (offset + ISS.NUMBER_OF_FRAGMENT_CHUNKS - 1)
                        % (Curl.HASH_LENGTH / Converter.NUMBER_OF_TRITS_IN_A_TRYTE) + 1;
                ISSInPlace.digest(SpongeFactory.Mode.KERL, normalizedBundle,
                        offset % (Curl.HASH_LENGTH / Converter.NUMBER_OF_TRITS_IN_A_TRYTE),
                        bundleTxs.get(j).trits(),
                        TransactionViewModel.SIGNATURE_MESSAGE_FRAGMENT_TRINARY_OFFSET, digestTrits);
                addressInstance.absorb(digestTrits, 0, Curl.HASH_LENGTH);
                offset = offsetNext;
            } //loop to traverse signature fragments divided between transactions
            while (++j < bundleTxs.size() && bundleTxs.get(j).getAddressHash().equals(tvm.getAddressHash())
                    && bundleTxs.get(j).value() == 0);

            addressInstance.squeeze(addressTrits, 0, addressTrits.length);
            // verify the signature: compare the address against the computed address
            // derived from the signature/bundle hash
            if (!Arrays.equals(tvm.getAddressHash().trits(), addressTrits)) {
                return Validity.INVALID;
            }
        }
        return Validity.VALID;
    }

    /**
     * Checks that the bundle's inputs and outputs are balanced.
     *
     * @param transactionViewModels collection of transactions that are in a bundle
     * @return {@code true} if balanced, {@code false} if unbalanced or {@code transactionViewModels} is empty
     */
    public static boolean isInconsistent(Collection<TransactionViewModel> transactionViewModels) {
        long sum = transactionViewModels.stream().map(TransactionViewModel::value).reduce(0L, Long::sum);
        return (sum != 0 || transactionViewModels.isEmpty());
    }

    /**
     * A bundle is invalid if The branch transaction hash of the non head transactions within a bundle, is not the same
     * as the trunk transaction hash of the head transaction.
     *
     * @param bundleTxs list of transactions that are in a bundle.
     * @return Whether the bundle tx chain is valid.
     */
    @VisibleForTesting
    Validity validateBundleTransactionsApproval(List<TransactionViewModel> bundleTxs) {
        // head transaction is the last entry of the (index-ordered) bundle
        Hash headTrunkTransactionHash = bundleTxs.get(bundleTxs.size() - 1).getTrunkTransactionHash();
        for (int i = 0; i < bundleTxs.size() - 1; i++) {
            if (!bundleTxs.get(i).getBranchTransactionHash().equals(headTrunkTransactionHash)) {
                return Validity.INVALID;
            }
        }
        return Validity.VALID;
    }

    /**
     * A bundle is invalid if the trunk and branch transactions approved by the bundle are non tails.
     *
     * @param bundleTxs The txs in the bundle.
     * @return Whether the bundle approves only tails.
     */
    @VisibleForTesting
    Validity validateBundleTailApproval(Tangle tangle, List<TransactionViewModel> bundleTxs) throws Exception {
        TransactionViewModel headTx = bundleTxs.get(bundleTxs.size() - 1);
        TransactionViewModel bundleTrunkTvm = headTx.getTrunkTransaction(tangle);
        TransactionViewModel bundleBranchTvm = headTx.getBranchTransaction(tangle);
        // both approvees must exist and have current index 0 (i.e. be tails)
        return bundleTrunkTvm != null && bundleBranchTvm != null && bundleBranchTvm.getCurrentIndex() == 0
                && bundleTrunkTvm.getCurrentIndex() == 0 ? Validity.VALID : Validity.INVALID;
    }

    /**
     * Traverses down the given {@code tail} trunk until all transactions that belong to the same bundle (identified by
     * the bundle hash) are found and loaded.
     *
     * @param tangle connection to the persistence layer
     * @param tail should be the last transaction of the bundle
     * @param skipIndexChecking whether to skip checking the indices while loading the transactions
     * @return map of all transactions in the bundle, mapped by their transaction hash
     */
    private static Map<Hash, TransactionViewModel> loadTransactionsFromTangle(Tangle tangle,
            TransactionViewModel tail, boolean skipIndexChecking) {
        final Map<Hash, TransactionViewModel> bundleTransactions = new HashMap<>();
        final Hash bundleHash = tail.getBundleHash();
        try {
            TransactionViewModel tx = tail;
            long i = tx.getCurrentIndex(), end = tx.lastIndex();
            do {
                bundleTransactions.put(tx.getHash(), tx);
                tx = tx.getTrunkTransaction(tangle);
            } while (
                    // if we are skipping the index checking, we must make sure that we are not
                    // having an empty bundle hash, as it would lead to an OOM where the genesis
                    // transaction is loaded over and over again
                    ((skipIndexChecking && !tx.getHash().equals(Hash.NULL_HASH))
                            || (i++ < end && tx.getCurrentIndex() != 0))
                    && tx.getBundleHash().equals(bundleHash));
        } catch (Exception e) {
            e.printStackTrace();
        }
        return bundleTransactions;
    }
}
25,037
47.712062
192
java
iri
iri-master/src/main/java/com/iota/iri/IRI.java
package com.iota.iri;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.iota.iri.conf.BaseIotaConfig;
import com.iota.iri.conf.Config;
import com.iota.iri.conf.ConfigFactory;
import com.iota.iri.conf.IotaConfig;
import com.iota.iri.network.NetworkInjectionConfiguration;
import com.iota.iri.service.API;
import com.iota.iri.utils.IotaUtils;
import com.iota.iri.service.restserver.resteasy.RestEasy;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;

import com.beust.jcommander.JCommander;
import com.beust.jcommander.ParameterException;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.BooleanUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 *
 * Main IOTA Reference Implementation (IRI) starting class.
 * <p>
 * The IRI software enables the Tangle to operate. Individuals can run IRI to operates Nodes.
 * The Node running the IRI software enables your device to communicate with neighbors
 * in the peer-to-peer network that the Tangle operates on.
 * </p>
 * <p>
 * IRI implements all the core functionality necessary for participating in an IOTA network as a full node.
 * This includes, but is not limited to:
 * <ul>
 * <li>Receiving and broadcasting transactions through TCP</li>
 * <li>Handling of HTTP requests from clients.</li>
 * <li>Tracking and validating Milestones.</li>
 * <li>Loading custom modules that extend the API.</li>
 * </ul>
 * </p>
 *
 * @see <a href="https://docs.iota.org/docs/node-software/0.1/iri/introduction/overview">Online documentation on iri</a>
 */
public class IRI {

    public static final String MAINNET_NAME = "IRI";
    public static final String TESTNET_NAME = "IRI Testnet";

    /**
     * The entry point of IRI.
     * Starts by configuring the logging settings, then proceeds to {@link IRILauncher#main(String[])}
     * The log level is set to INFO by default.
     *
     * @param args Configuration arguments. See {@link BaseIotaConfig} for a list of all options.
     * @throws Exception If we fail to start the IRI launcher.
     */
    public static void main(String[] args) throws Exception {
        // Logging is configured first before ANY references to Logger or LoggerFactory.
        // Any public method or field accessors needed in IRI should be put in IRI and then delegate to IRILauncher.
        // That ensures that future code does not need to know about this setup.
        configureLogging();
        IRILauncher.main(args);
    }

    /**
     * Reads the logging configuration file and logging level from system properties. You can set this values as
     * arguments to the Java VM by passing <code>-Dlogback.configurationFile=/path/to/config.xml -Dlogging-level=DEBUG</code>
     * to the Java VM. If no system properties are specified the logback default values and logging-level INFO will
     * be used.
     */
    private static void configureLogging() {
        String config = System.getProperty("logback.configurationFile");
        String level = System.getProperty("logging-level", "").toUpperCase();
        switch (level) {
            case "OFF":
            case "ERROR":
            case "WARN":
            case "INFO":
            case "DEBUG":
            case "TRACE":
                // recognized level: keep as-is
                break;
            case "ALL":
                level = "TRACE";
                break;
            default:
                // unrecognized or unset: fall back to INFO
                level = "INFO";
                break;
        }
        // write the normalized level back into the system properties
        System.getProperties().put("logging-level", level);
        System.out.println("Logging - property 'logging-level' set to: [" + level + "]");
        if (config != null) {
            System.out.println("Logging - alternate logging configuration file specified at: '" + config + "'");
        }
    }

    private static class IRILauncher {
        private static final Logger log = LoggerFactory.getLogger(IRILauncher.class);

        public static Iota iota;
        public static API api;
        public static IXI ixi;

        /**
         * Starts IRI. Setup is as follows:
         * <ul>
         * <li>Load the configuration.</li>
         * <li>Create {@link Iota}, {@link IXI} and {@link API}.</li>
         * <li>Listen for node shutdown.</li>
         * <li>Initialize {@link Iota}, {@link IXI} and {@link API} using their <tt>init()</tt> methods.</li>
         * </ul>
         *
         * If no exception is thrown, the node starts synchronizing with the network, and the API can be used.
         *
         * @param args Configuration arguments. See {@link BaseIotaConfig} for a list of all options.
         * @throws Exception If any of the <tt>init()</tt> methods failed to initialize.
         */
        public static void main(String [] args) throws Exception {
            IotaConfig config = createConfiguration(args);
            String version = IotaUtils.getIriVersion();
            log.info("Welcome to {} {}", config.isTestnet() ? TESTNET_NAME : MAINNET_NAME, version);

            Injector injector = Guice.createInjector(
                    new MainInjectionConfiguration(config),
                    new NetworkInjectionConfiguration(config));
            iota = injector.getInstance(Iota.class);
            ixi = injector.getInstance(IXI.class);
            api = injector.getInstance(API.class);
            // register the shutdown hook before init so a failed start still cleans up
            shutdownHook();

            try {
                iota.init();
                //TODO redundant parameter but we will touch this when we refactor IXI
                ixi.init(config.getIxiDir());
                api.init(new RestEasy(config));
                log.info("IOTA Node initialised correctly.");
            } catch (Exception e) {
                log.error("Exception during IOTA node initialisation: ", e);
                throw e;
            }
        }

        /**
         * Gracefully shuts down the node by calling <tt>shutdown()</tt> on {@link Iota}, {@link IXI} and {@link API}.
         * Exceptions during shutdown are caught and logged.
         */
        private static void shutdownHook() {
            Runtime.getRuntime().addShutdownHook(new Thread(() -> {
                log.info("Shutting down IOTA node, please hold tight...");
                try {
                    ixi.shutdown();
                    api.shutDown();
                    iota.shutdown();
                } catch (Exception e) {
                    log.error("Exception occurred shutting down IOTA node: ", e);
                }
            }, "Shutdown Hook"));
        }

        // Builds the IotaConfig from an optional config file plus command line arguments.
        // Exits the process on unreadable config or bad arguments.
        private static IotaConfig createConfiguration(String[] args) {
            IotaConfig iotaConfig = null;
            String message = "Configuration is created using ";
            try {
                boolean testnet = isTestnet(args);
                File configFile = chooseConfigFile(args);
                if (configFile != null) {
                    iotaConfig = ConfigFactory.createFromFile(configFile, testnet);
                    message += configFile.getName() + " and command line args";
                } else {
                    iotaConfig = ConfigFactory.createIotaConfig(testnet);
                    message += "command line args only";
                }
                // command line arguments override values loaded from the file
                JCommander jCommander = iotaConfig.parseConfigFromArgs(args);
                if (iotaConfig.isHelp()) {
                    jCommander.usage();
                    System.exit(0);
                }
            } catch (IOException | IllegalArgumentException e) {
                log.error("There was a problem reading configuration from file: {}", e.getMessage());
                log.debug("", e);
                System.exit(-1);
            } catch (ParameterException e) {
                log.error("There was a problem parsing commandline arguments: {}", e.getMessage());
                log.debug("", e);
                System.exit(-1);
            }

            log.info(message);
            log.info("parsed the following cmd args: {}", Arrays.toString(args));
            return iotaConfig;
        }

        /**
         * We are connected to testnet when {@link Config#TESTNET_FLAG} is passed in program startup,
         * following with <code>true</code>
         *
         * @param args the list of program startup arguments
         * @return <code>true</code> if this is testnet, otherwise <code>false</code>
         */
        private static boolean isTestnet(String[] args) {
            int index = ArrayUtils.indexOf(args, Config.TESTNET_FLAG);
            if (index != -1 && args.length > index+1) {
                Boolean bool = BooleanUtils.toBooleanObject(args[index+1]);
                return bool == null ? false : bool;
            }
            return false;
        }

        /**
         * Parses the command line arguments for a config file that can be provided by parameter <code>-c</code>
         * or parameter <code>--config</code>. If no filename was provided we fall back to <code>iota.ini</code> file.
         * If no <code>iota.ini</code> file can be found return null.
         *
         * @param args command line arguments passed to main method.
         * @return File the chosen file to use as config, or null.
         */
        private static File chooseConfigFile(String[] args) {
            int index = Math.max(ArrayUtils.indexOf(args, "-c"), ArrayUtils.indexOf(args, "--config"));
            if (index != -1) {
                try {
                    String fileName = args[++index];
                    return new File(fileName);
                } catch (Exception e) {
                    throw new IllegalArgumentException(
                            "The file after `-c` or `--config` isn't specified or can't be parsed.", e);
                }
            } else if (IotaConfig.CONFIG_FILE.exists()) {
                return IotaConfig.CONFIG_FILE;
            }
            return null;
        }
    }
}
9,992
40.6375
125
java
iri
iri-master/src/main/java/com/iota/iri/IXI.java
package com.iota.iri; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.iota.iri.service.CallableRequest; import com.iota.iri.service.dto.AbstractResponse; import com.iota.iri.service.dto.ErrorResponse; import com.sun.nio.file.SensitivityWatchEventModifier; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.script.Bindings; import javax.script.ScriptEngine; import javax.script.ScriptEngineManager; import javax.script.ScriptException; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.Reader; import java.nio.file.*; import java.nio.file.attribute.BasicFileAttributes; import java.time.Instant; import java.util.EnumSet; import java.util.HashMap; import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; import static com.sun.jmx.mbeanserver.Util.cast; import static java.nio.file.StandardWatchEventKinds.*; public class IXI { private static final Logger log = LoggerFactory.getLogger(IXI.class); private static final int MAX_TREE_DEPTH = 2; private final Gson gson = new GsonBuilder().create(); private final ScriptEngine scriptEngine = (new ScriptEngineManager()).getEngineByName("JavaScript"); private final Map<String, Map<String, CallableRequest<AbstractResponse>>> ixiAPI = new HashMap<>(); private final Map<String, Map<String, Runnable>> ixiLifetime = new HashMap<>(); private final Map<WatchKey, Path> watchKeys = new HashMap<>(); private final Map<Path, Long> loadedLastTime = new HashMap<>(); private WatchService watcher; private Thread dirWatchThread; private Path rootPath; private boolean shutdown = false; private final Iota iota; public IXI() { iota = null; } public IXI(Iota iota) { this.iota = iota; } public void init(String rootDir) throws IOException { if(rootDir.length() > 0) { watcher = FileSystems.getDefault().newWatchService(); this.rootPath = Paths.get(rootDir); 
if(this.rootPath.toFile().exists() || this.rootPath.toFile().mkdir()) { registerRecursive(this.rootPath); dirWatchThread = (new Thread(this::processWatchEvents)); dirWatchThread.start(); } } } private void registerRecursive(final Path root) throws IOException { Files.walkFileTree(root, EnumSet.allOf(FileVisitOption.class), MAX_TREE_DEPTH, new SimpleFileVisitor<Path>() { @Override public FileVisitResult preVisitDirectory(Path modulePath, BasicFileAttributes attrs) { watch(modulePath); if (!modulePath.equals(rootPath)) { loadModule(modulePath); } return FileVisitResult.CONTINUE; } }); } private void processWatchEvents() { while(!shutdown) { WatchKey key = null; try { key = watcher.poll(1000, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { log.error("Watcher interrupted: ", e); } if (key == null) { continue; } WatchKey finalKey = key; key.pollEvents().forEach(watchEvent -> { WatchEvent<Path> pathEvent = cast(watchEvent); IxiEvent ixiEvent = IxiEvent.fromName(watchEvent.kind().name()); Path watchedPath = watchKeys.get(finalKey); if (watchedPath != null) { handleModulePathEvent(watchedPath, ixiEvent, watchedPath.resolve(pathEvent.context())); } }); key.reset(); } } private String getModuleName(Path modulePath, boolean checkIfIsDir) { return rootPath.relativize(!checkIfIsDir || Files.isDirectory(modulePath) ? 
modulePath : modulePath.getParent()).toString(); } private Path getRealPath(Path currentPath) { if (Files.isDirectory(currentPath.getParent()) && !currentPath.getParent().equals(rootPath)) { return currentPath.getParent(); } else { return currentPath; } } private void handleModulePathEvent(Path watchedPath, IxiEvent ixiEvent, Path changedPath) { if (!watchedPath.equals(rootPath) && Files.isDirectory(changedPath)) { // we are only interested in dir changes in tree depth level 2 return; } handlePathEvent(ixiEvent, changedPath); } private void handlePathEvent(IxiEvent ixiEvent, Path changedPath) { switch(ixiEvent) { case CREATE_MODULE: if (checkOs() == OsVariants.Unix) { watch(changedPath); loadModule(changedPath); } break; case MODIFY_MODULE: Long lastModification = loadedLastTime.get(getRealPath(changedPath)); if (lastModification == null || Instant.now().toEpochMilli() - lastModification > 50L) { if (ixiLifetime.containsKey(getModuleName(changedPath, true))) { unloadModule(changedPath); } loadedLastTime.put(getRealPath(changedPath), Instant.now().toEpochMilli()); loadModule(getRealPath(changedPath)); } break; case DELETE_MODULE: Path realPath = getRealPath(changedPath); unwatch(realPath); if (ixiLifetime.containsKey(getModuleName(realPath, false))) { unloadModule(changedPath); } break; default: } } private static OsVariants checkOs() { String os = System.getProperty("os.name"); if (os.startsWith("Windows")) { return OsVariants.Windows; } else { return OsVariants.Unix; } } private void watch(Path dir) { try { WatchKey watchKey = dir.register(watcher, new WatchEvent.Kind[]{ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY}, SensitivityWatchEventModifier.HIGH); watchKeys.put(watchKey, dir); } catch (IOException e) { log.error("Could not create watcher for path '{}'.", dir); } } private void unwatch(Path dir) { // TODO: Get watchkey for 'dir' in an optimized way Optional<WatchKey> dirKey = watchKeys.keySet().stream().filter(watchKey -> 
watchKeys.get(watchKey).equals(dir)).findFirst(); if (dirKey.isPresent()) { watchKeys.remove(dirKey.get()); dirKey.get().cancel(); } } private Path getPackagePath(Path modulePath) { return modulePath.resolve("package.json"); } public AbstractResponse processCommand(final String command, Map<String, Object> request) { if(command == null || command.isEmpty()) { return ErrorResponse.create("Command can not be null or empty"); } Pattern pattern = Pattern.compile("^(.*)\\.(.*)$"); Matcher matcher = pattern.matcher(command); if (matcher.find()) { Map<String, CallableRequest<AbstractResponse>> ixiMap = ixiAPI.get(matcher.group(1)); if (ixiMap != null && ixiMap.containsKey(matcher.group(2))) { return ixiMap.get(matcher.group(2)).call(request); } } return ErrorResponse.create("Command [" + command + "] is unknown"); } private void loadModule(Path modulePath) { log.info("Searching: {}", modulePath); Path packageJsonPath = getPackagePath(modulePath); if (!Files.exists(packageJsonPath)) { log.info("No package.json found in {}", modulePath); return; } Map packageJson; try (Reader packageJsonReader = new FileReader(packageJsonPath.toFile()) ){ packageJson = gson.fromJson(packageJsonReader, Map.class); } catch (IOException e) { log.error("Could not load {}", packageJsonPath); return; } if(packageJson != null && packageJson.get("main") != null) { log.info("Loading module: {}", getModuleName(modulePath, true)); Path pathToMain = Paths.get(modulePath.toString(), (String) packageJson.get("main")); attach(pathToMain, getModuleName(modulePath, true)); } else { log.info("No start script found"); } } private void unloadModule(Path moduleNamePath) { log.debug("Unloading module: {}", moduleNamePath); Path realPath = getRealPath(moduleNamePath); String moduleName = getModuleName(realPath, false); detach(moduleName); ixiAPI.remove(moduleName); } private void attach(Path pathToMain, String moduleName) { try (Reader ixiModuleReader = new FileReader(pathToMain.toFile())) { log.info("Starting 
script: {}", pathToMain); Map<String, CallableRequest<AbstractResponse>> ixiMap = new HashMap<>(); Map<String, Runnable> startStop = new HashMap<>(); Bindings bindings = scriptEngine.createBindings(); bindings.put("API", ixiMap); bindings.put("IXICycle", startStop); bindings.put("IOTA", iota); ixiAPI.put(moduleName, ixiMap); ixiLifetime.put(moduleName, startStop); try { scriptEngine.eval(ixiModuleReader, bindings); } catch (ScriptException e) { log.error("Script error", e); } } catch (IOException e) { log.error("Could not load {}", pathToMain); } } private void detach(String moduleName) { Map<String, Runnable> ixiMap = ixiLifetime.get(moduleName); if(ixiMap != null) { Runnable stop = ixiMap.get("shutdown"); if (stop != null) { stop.run(); } } ixiLifetime.remove(moduleName); } /** * Cleans up the environment, shutdown the dir watcher thread and wait till all running api calls are completed. * @throws InterruptedException if directory watching thread was unexpected interrupted. */ public void shutdown() throws InterruptedException { if(dirWatchThread != null) { shutdown = true; dirWatchThread.join(); ixiAPI.keySet().forEach(this::detach); ixiAPI.clear(); ixiLifetime.clear(); } } }
10,703
36.426573
155
java
iri
iri-master/src/main/java/com/iota/iri/Iota.java
package com.iota.iri;

import com.iota.iri.conf.IotaConfig;
import com.iota.iri.controllers.TipsViewModel;
import com.iota.iri.controllers.TransactionViewModel;
import com.iota.iri.network.NeighborRouter;
import com.iota.iri.network.TipsRequester;
import com.iota.iri.network.TransactionRequester;
import com.iota.iri.network.pipeline.TransactionProcessingPipeline;
import com.iota.iri.service.ledger.LedgerService;
import com.iota.iri.service.milestone.*;
import com.iota.iri.service.snapshot.LocalSnapshotManager;
import com.iota.iri.service.snapshot.SnapshotException;
import com.iota.iri.service.snapshot.SnapshotProvider;
import com.iota.iri.service.snapshot.SnapshotService;
import com.iota.iri.service.snapshot.conditions.SnapshotDepthCondition;
import com.iota.iri.service.spentaddresses.SpentAddressesException;
import com.iota.iri.service.spentaddresses.SpentAddressesProvider;
import com.iota.iri.service.spentaddresses.SpentAddressesService;
import com.iota.iri.service.tipselection.TipSelector;
import com.iota.iri.service.transactionpruning.DepthPruningCondition;
import com.iota.iri.service.transactionpruning.SizePruningCondition;
import com.iota.iri.service.transactionpruning.TransactionPruner;
import com.iota.iri.service.validation.TransactionSolidifier;
import com.iota.iri.service.validation.TransactionValidator;
import com.iota.iri.storage.*;
import com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider;
import com.iota.iri.utils.Pair;
import com.iota.iri.zmq.ZmqMessageQueueProvider;

import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.NotImplementedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 *
 * The main class of IRI. This will propagate transactions into and throughout the network. This data is stored as a
 * {@link Tangle}, a form of a Directed acyclic graph. All incoming data will be stored in one or more implementations
 * of {@link PersistenceProvider}.
 *
 * <p>
 * During initialization, all the Providers can be set to rescan or revalidate their transactions. After initialization,
 * an asynchronous process has started which will process inbound and outbound transactions. Each full node should be
 * peered with 3-5 other full nodes (neighbors) to function optimally.
 * </p>
 * <p>
 * If this node has no Neighbors defined, no data is transferred. However, if the node has Neighbors, but no Internet
 * connection, synchronization will continue after Internet connection is established. Any transactions sent to this
 * node in its local network will then be processed. This makes IRI able to run partially offline if an already existing
 * database exists on this node.
 * </p>
 * <p>
 * Validation of a transaction is the process by which other devices choose the transaction.
 * This is done via a {@link TipSelector} algorithm, after which the transaction performs
 * the necessary proof-of-work in order to cast their vote of confirmation/approval upon those tips.
 * </p>
 * <p>
 * As many other transactions repeat this process on top of each other,
 * validation of the transaction in question slowly builds up enough verifications.
 * Eventually this will reach a minimum acceptable verification threshold.
 * This threshold is determined by the recipient of the transaction.
 * When this minimum threshold is reached, the transaction is "confirmed".
 * </p>
 *
 */
public class Iota {

    private static final Logger log = LoggerFactory.getLogger(Iota.class);

    //Fields must be public in order to be used by IXI
    public final SpentAddressesProvider spentAddressesProvider;
    public final SpentAddressesService spentAddressesService;
    public final SnapshotProvider snapshotProvider;
    public final SnapshotService snapshotService;
    public final LocalSnapshotManager localSnapshotManager;   // may be null when local snapshots are disabled
    public final MilestoneService milestoneService;
    public final SeenMilestonesRetriever seenMilestonesRetriever;
    public final LedgerService ledgerService;
    public final TransactionPruner transactionPruner;         // may be null when pruning is disabled
    public final MilestoneSolidifier milestoneSolidifier;
    public final TransactionSolidifier transactionSolidifier;
    public final BundleValidator bundleValidator;

    public final Tangle tangle;
    public final TransactionValidator transactionValidator;
    public final TransactionRequester transactionRequester;

    public final TipsRequester tipsRequester;
    public final TransactionProcessingPipeline txPipeline;
    public final NeighborRouter neighborRouter;

    // used in test
    public final IotaConfig configuration;
    public final TipsViewModel tipsViewModel;
    public final TipSelector tipsSelector;
    public LocalSnapshotsPersistenceProvider localSnapshotsDb;

    /**
     * Initializes the latest snapshot and then creates all services needed to run an IOTA node.
     * All collaborators are injected; this constructor only stores references — no service
     * is started here (see {@link #init()}).
     *
     * @param configuration Information about how this node will be configured.
     *
     */
    public Iota(IotaConfig configuration, SpentAddressesProvider spentAddressesProvider,
            SpentAddressesService spentAddressesService, SnapshotProvider snapshotProvider,
            SnapshotService snapshotService, LocalSnapshotManager localSnapshotManager,
            MilestoneService milestoneService, SeenMilestonesRetriever seenMilestonesRetriever,
            LedgerService ledgerService, TransactionPruner transactionPruner,
            MilestoneSolidifier milestoneSolidifier, BundleValidator bundleValidator, Tangle tangle,
            TransactionValidator transactionValidator, TransactionRequester transactionRequester,
            NeighborRouter neighborRouter, TransactionProcessingPipeline transactionProcessingPipeline,
            TipsRequester tipsRequester, TipsViewModel tipsViewModel, TipSelector tipsSelector,
            LocalSnapshotsPersistenceProvider localSnapshotsDb, TransactionSolidifier transactionSolidifier) {
        this.configuration = configuration;

        this.ledgerService = ledgerService;
        this.spentAddressesProvider = spentAddressesProvider;
        this.spentAddressesService = spentAddressesService;
        this.snapshotProvider = snapshotProvider;
        this.snapshotService = snapshotService;
        this.localSnapshotManager = localSnapshotManager;
        this.milestoneService = milestoneService;
        this.seenMilestonesRetriever = seenMilestonesRetriever;
        this.milestoneSolidifier = milestoneSolidifier;
        this.transactionPruner = transactionPruner;
        this.neighborRouter = neighborRouter;
        this.txPipeline = transactionProcessingPipeline;
        this.tipsRequester = tipsRequester;
        this.transactionSolidifier = transactionSolidifier;
        this.localSnapshotsDb = localSnapshotsDb;

        // legacy classes
        this.bundleValidator = bundleValidator;
        this.tangle = tangle;
        this.tipsViewModel = tipsViewModel;
        this.transactionRequester = transactionRequester;
        this.transactionValidator = transactionValidator;
        this.tipsSelector = tipsSelector;
    }

    /**
     * Initializes the providers that other services depend on.
     * Ordering matters here: the snapshot provider must come up first because the
     * spent-addresses check below reads the initial snapshot.
     *
     * @throws SnapshotException       if the snapshot provider fails to initialize
     * @throws SpentAddressesException if the spent-addresses provider fails to initialize
     */
    private void initDependencies() throws SnapshotException, SpentAddressesException {
        //snapshot provider must be initialized first
        //because we check whether spent addresses data exists
        snapshotProvider.init();
        // On mainnet, once we are past the milestone start index, spent-addresses data must exist.
        boolean assertSpentAddressesExistence = !configuration.isTestnet()
                && snapshotProvider.getInitialSnapshot().getIndex() != configuration.getMilestoneStartIndex();
        spentAddressesProvider.init(assertSpentAddressesExistence);
        seenMilestonesRetriever.init();
        if (transactionPruner != null) {
            transactionPruner.init();
        }
    }

    /**
     * <p>
     * Adds all database providers, and starts initialization of our services.
     * According to the {@link IotaConfig}, data is optionally cleared, reprocessed and reverified.
     * </p>
     * After this function, incoming and outbound transaction processing has started.
     *
     * @throws Exception If along the way a service fails to initialize. Most common cause is a file read or database
     *                   error.
     */
    public void init() throws Exception {
        localSnapshotsDb.init();
        initDependencies(); // remainder of injectDependencies method (contained init code)

        initializeTangle();
        tangle.init();

        if (configuration.isRescanDb()) {
            rescanDb();
        }

        if (configuration.isRevalidate()) {
            // Drop derived milestone/state data and transaction metadata so it is rebuilt.
            tangle.clearColumn(com.iota.iri.model.persistables.Milestone.class);
            tangle.clearColumn(com.iota.iri.model.StateDiff.class);
            tangle.clearMetadata(com.iota.iri.model.persistables.Transaction.class);
        }

        // Start the asynchronous processing services.
        txPipeline.start();
        neighborRouter.start();
        tipsRequester.start();

        seenMilestonesRetriever.start();
        milestoneSolidifier.start();
        transactionSolidifier.start();

        if (localSnapshotManager != null) {
            localSnapshotManager.addSnapshotCondition(new SnapshotDepthCondition(configuration, snapshotProvider));
            localSnapshotManager.addPruningConditions(
                    new DepthPruningCondition(configuration, snapshotProvider, tangle),
                    new SizePruningCondition(tangle, configuration));
            localSnapshotManager.start(milestoneSolidifier);
        }
        if (transactionPruner != null) {
            transactionPruner.start();
        }
    }

    /**
     * Clears every derived index column, then walks all stored transactions and re-creates
     * their index entries in batches. Progress is logged every 10000 transactions.
     */
    private void rescanDb() throws Exception {
        // delete all transaction indexes
        tangle.clearColumn(com.iota.iri.model.persistables.Address.class);
        tangle.clearColumn(com.iota.iri.model.persistables.Bundle.class);
        tangle.clearColumn(com.iota.iri.model.persistables.Approvee.class);
        tangle.clearColumn(com.iota.iri.model.persistables.ObsoleteTag.class);
        tangle.clearColumn(com.iota.iri.model.persistables.Tag.class);
        tangle.clearColumn(com.iota.iri.model.persistables.Milestone.class);
        tangle.clearColumn(com.iota.iri.model.StateDiff.class);
        tangle.clearMetadata(com.iota.iri.model.persistables.Transaction.class);

        // rescan all tx & refill the columns
        TransactionViewModel tx = TransactionViewModel.first(tangle);
        int counter = 0;
        while (tx != null) {
            if (++counter % 10000 == 0) {
                log.info("Rescanned {} Transactions", counter);
            }
            List<Pair<Indexable, Persistable>> saveBatch = tx.getSaveBatch();
            //don't re-save the tx itself
            saveBatch.remove(saveBatch.size() - 1);
            tangle.saveBatch(saveBatch);
            tx = tx.next(tangle);
        }
    }

    /**
     * Gracefully shuts down by calling <tt>shutdown()</tt> on all used services. Exceptions during shutdown are not
     * caught.
     */
    public void shutdown() throws Exception {
        // shutdown in reverse starting order (to not break any dependencies)
        milestoneSolidifier.shutdown();
        transactionSolidifier.shutdown();
        seenMilestonesRetriever.shutdown();

        if (transactionPruner != null) {
            transactionPruner.shutdown();
        }

        if (localSnapshotManager != null) {
            localSnapshotManager.shutdown();
        }

        tipsRequester.shutdown();
        txPipeline.shutdown();
        neighborRouter.shutdown();
        localSnapshotsDb.shutdown();
        tangle.shutdown();

        // free the resources of the snapshot provider last because all other instances need it
        snapshotProvider.shutdown();
    }

    /**
     * Registers the persistence provider(s) on the tangle according to the configured
     * main database type; also attaches the ZMQ message queue provider when enabled.
     */
    private void initializeTangle() {
        switch (configuration.getMainDb()) {
            case "rocksdb": {
                tangle.addPersistenceProvider(createRocksDbProvider(
                        configuration.getDbPath(),
                        configuration.getDbLogPath(),
                        configuration.getDbConfigFile(),
                        configuration.getDbCacheSize(),
                        Tangle.COLUMN_FAMILIES,
                        Tangle.METADATA_COLUMN_FAMILY)
                );
                break;
            }
            default: {
                throw new NotImplementedException("No such database type.");
            }
        }

        if (configuration.isZmqEnabled()) {
            tangle.addMessageQueueProvider(new ZmqMessageQueueProvider(configuration));
        }
    }

    /**
     * Creates a new Persistable provider with the supplied settings
     *
     * @param path The location where the database will be stored
     * @param log The location where the log files will be stored
     * @param configFile The location where the RocksDB config is read from
     * @param cacheSize the size of the cache used by the database implementation
     * @param columnFamily A map of the names related to their Persistable class
     * @param metadata Map of metadata used by the Persistable class, can be <code>null</code>
     * @return A new Persistance provider
     */
    private PersistenceProvider createRocksDbProvider(String path, String log, String configFile, int cacheSize,
            Map<String, Class<? extends Persistable>> columnFamily,
            Map.Entry<String, Class<? extends Persistable>> metadata) {
        return new RocksDBPersistenceProvider(
                path, log, configFile, cacheSize, columnFamily, metadata);
    }
}
13,557
42.735484
120
java
iri
iri-master/src/main/java/com/iota/iri/IxiEvent.java
package com.iota.iri; import java.util.Arrays; import java.util.Optional; public enum IxiEvent { CREATE_MODULE("ENTRY_CREATE"), MODIFY_MODULE("ENTRY_MODIFY"), DELETE_MODULE("ENTRY_DELETE"), OVERFLOW("OVERFLOW"), UNKNOWN("UNKNOWN"); private String name; IxiEvent(String name) { this.name = name; } public String getName() { return name; } public static IxiEvent fromName(String name) { Optional<IxiEvent> ixiEvent = Arrays.stream(IxiEvent.values()).filter(event -> event.name.equals(name)).findFirst(); return ixiEvent.orElse(UNKNOWN); } }
625
21.357143
124
java
iri
iri-master/src/main/java/com/iota/iri/MainInjectionConfiguration.java
package com.iota.iri;

import com.iota.iri.conf.IotaConfig;
import com.iota.iri.controllers.TipsViewModel;
import com.iota.iri.network.NeighborRouter;
import com.iota.iri.network.TipsRequester;
import com.iota.iri.network.TransactionRequester;
import com.iota.iri.network.pipeline.TransactionProcessingPipeline;
import com.iota.iri.service.API;
import com.iota.iri.service.ledger.LedgerService;
import com.iota.iri.service.ledger.impl.LedgerServiceImpl;
import com.iota.iri.service.milestone.InSyncService;
import com.iota.iri.service.milestone.MilestoneService;
import com.iota.iri.service.milestone.MilestoneSolidifier;
import com.iota.iri.service.milestone.SeenMilestonesRetriever;
import com.iota.iri.service.milestone.impl.MilestoneInSyncService;
import com.iota.iri.service.milestone.impl.MilestoneServiceImpl;
import com.iota.iri.service.milestone.impl.MilestoneSolidifierImpl;
import com.iota.iri.service.milestone.impl.SeenMilestonesRetrieverImpl;
import com.iota.iri.service.snapshot.LocalSnapshotManager;
import com.iota.iri.service.snapshot.SnapshotProvider;
import com.iota.iri.service.snapshot.SnapshotService;
import com.iota.iri.service.snapshot.impl.LocalSnapshotManagerImpl;
import com.iota.iri.service.snapshot.impl.SnapshotProviderImpl;
import com.iota.iri.service.snapshot.impl.SnapshotServiceImpl;
import com.iota.iri.service.spentaddresses.SpentAddressesProvider;
import com.iota.iri.service.spentaddresses.SpentAddressesService;
import com.iota.iri.service.spentaddresses.impl.SpentAddressesProviderImpl;
import com.iota.iri.service.spentaddresses.impl.SpentAddressesServiceImpl;
import com.iota.iri.service.tipselection.*;
import com.iota.iri.service.tipselection.impl.*;
import com.iota.iri.service.transactionpruning.TransactionPruner;
import com.iota.iri.service.transactionpruning.async.AsyncTransactionPruner;
import com.iota.iri.service.validation.TransactionSolidifier;
import com.iota.iri.service.validation.TransactionValidator;
import com.iota.iri.service.validation.impl.TransactionSolidifierImpl;
import com.iota.iri.storage.LocalSnapshotsPersistenceProvider;
import com.iota.iri.storage.Tangle;
import com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Singleton;

import java.security.SecureRandom;

import javax.annotation.Nullable;

/**
 * Guice module. Configuration class for dependency injection.
 *
 * Each {@code @Provides @Singleton} method below constructs exactly one service of the
 * node; {@link Iota} receives all of them in {@code provideIota}. Some providers return
 * {@code null} when the corresponding feature is disabled in the configuration — their
 * consumers mark the parameter {@code @Nullable}.
 */
public class MainInjectionConfiguration extends AbstractModule {

    private final IotaConfig configuration;

    /**
     * Creates the guice injection module.
     * @param configuration The iota configuration used for conditional bean creation and constructing beans with
     *                      configuration parameters.
     */
    public MainInjectionConfiguration(IotaConfig configuration) {
        this.configuration = configuration;
    }

    /** Snapshot provider backed by the local-snapshots persistence provider. */
    @Singleton
    @Provides
    SnapshotProvider provideSnapshotProvider(LocalSnapshotsPersistenceProvider localSnapshotsPersistenceProvider) {
        return new SnapshotProviderImpl(configuration, localSnapshotsPersistenceProvider);
    }

    /** Spent-addresses provider backed by the local-snapshots database. */
    @Singleton
    @Provides
    SpentAddressesProvider provideSpentAddressesProvider(LocalSnapshotsPersistenceProvider localSnapshotsDb) {
        return new SpentAddressesProviderImpl(configuration, localSnapshotsDb);
    }

    /** Snapshot service operating on the tangle and the snapshot provider. */
    @Singleton
    @Provides
    SnapshotService provideSnapshotService(Tangle tangle, SnapshotProvider snapshotProvider) {
        return new SnapshotServiceImpl(tangle, snapshotProvider, configuration);
    }

    /** Milestone service used to validate and apply milestones. */
    @Singleton
    @Provides
    MilestoneService provideMilestoneService(Tangle tangle, SnapshotProvider snapshotProvider,
            SnapshotService snapshotService, BundleValidator bundleValidator) {
        return new MilestoneServiceImpl(tangle, snapshotProvider, snapshotService, bundleValidator, configuration);
    }

    /** Spent-addresses service combining tangle access with the spent-addresses provider. */
    @Singleton
    @Provides
    SpentAddressesService provideSpentAddressesService(Tangle tangle, SnapshotProvider snapshotProvider,
            SpentAddressesProvider spentAddressesProvider, BundleValidator bundleValidator) {
        return new SpentAddressesServiceImpl(tangle, snapshotProvider, spentAddressesProvider, bundleValidator,
                configuration);
    }

    /** Ledger service computing/applying balance changes. */
    @Singleton
    @Provides
    LedgerService provideLedgerService(Tangle tangle, SnapshotProvider snapshotProvider,
            SnapshotService snapshotService, MilestoneService milestoneService,
            SpentAddressesService spentAddressesService, BundleValidator bundleValidator) {
        return new LedgerServiceImpl(tangle, snapshotProvider, snapshotService, milestoneService,
                spentAddressesService, bundleValidator);
    }

    /** Retriever that requests milestones already seen in the snapshot metadata. */
    @Singleton
    @Provides
    SeenMilestonesRetriever provideSeenMilestonesRetriever(Tangle tangle, SnapshotProvider snapshotProvider,
            TransactionRequester transactionRequester) {
        return new SeenMilestonesRetrieverImpl(tangle, snapshotProvider, transactionRequester);
    }

    /** Milestone solidifier driving milestone solidification and application. */
    @Singleton
    @Provides
    MilestoneSolidifier provideMilestoneSolidifier(SnapshotProvider snapshotProvider,
            TransactionSolidifier transactionSolidifier, Tangle tangle, LedgerService ledgerService,
            TransactionRequester transactionRequester, MilestoneService milestoneService) {
        return new MilestoneSolidifierImpl(transactionSolidifier, tangle, snapshotProvider, ledgerService,
                transactionRequester, milestoneService, configuration);
    }

    /**
     * Transaction pruner — only created when local snapshots AND pruning are both enabled;
     * otherwise {@code null} (consumers take it as {@code @Nullable}).
     */
    @Singleton
    @Provides
    TransactionPruner provideTransactionPruner(Tangle tangle, SnapshotProvider snapshotProvider,
            SpentAddressesService spentAddressesService, SpentAddressesProvider spentAddressesProvider,
            TipsViewModel tipsViewModel) {
        return configuration.getLocalSnapshotsEnabled() && configuration.getLocalSnapshotsPruningEnabled()
                ? new AsyncTransactionPruner(tangle, snapshotProvider, spentAddressesService, spentAddressesProvider,
                        tipsViewModel, configuration)
                : null;
    }

    /** Service reporting whether this node is in sync with the network. */
    @Singleton
    @Provides
    InSyncService provideInSyncService(SnapshotProvider snapshotProvider, MilestoneSolidifier milestoneSolidifier) {
        return new MilestoneInSyncService(snapshotProvider, milestoneSolidifier);
    }

    /**
     * Local snapshot manager — only created when local snapshots are enabled; otherwise
     * {@code null} (consumers take it as {@code @Nullable}).
     */
    @Singleton
    @Provides
    LocalSnapshotManager provideLocalSnapshotManager(SnapshotProvider snapshotProvider,
            SnapshotService snapshotService, @Nullable TransactionPruner transactionPruner,
            InSyncService inSyncService) {
        return configuration.getLocalSnapshotsEnabled()
                ? new LocalSnapshotManagerImpl(snapshotProvider, snapshotService, transactionPruner, configuration,
                        inSyncService)
                : null;
    }

    /** Validator for incoming transactions. */
    @Singleton
    @Provides
    TransactionValidator provideTransactionValidator(SnapshotProvider snapshotProvider,
            TransactionRequester transactionRequester) {
        return new TransactionValidator(snapshotProvider, transactionRequester, configuration);
    }

    /** Solidifier resolving missing references of transactions. */
    @Singleton
    @Provides
    TransactionSolidifier provideTransactionSolidifier(Tangle tangle, SnapshotProvider snapshotProvider,
            TransactionRequester transactionRequester, TipsViewModel tipsViewModel){
        return new TransactionSolidifierImpl(tangle, snapshotProvider, transactionRequester, tipsViewModel,
                configuration.getCoordinator());
    }

    /**
     * Tip selector assembled from its collaborators: entry point selection, cumulative
     * weight rating, tail finding and the alpha random walk.
     */
    @Singleton
    @Provides
    TipSelector provideTipSelector(Tangle tangle, SnapshotProvider snapshotProvider,
            MilestoneSolidifier milestoneSolidifier, LedgerService ledgerService) {
        EntryPointSelector entryPointSelector = new EntryPointSelectorImpl(tangle, snapshotProvider,
                milestoneSolidifier);
        RatingCalculator ratingCalculator = new CumulativeWeightCalculator(tangle, snapshotProvider);
        TailFinder tailFinder = new TailFinderImpl(tangle);
        Walker walker = new WalkerAlpha(tailFinder, tangle, new SecureRandom(), configuration);
        return new TipSelectorImpl(tangle, snapshotProvider, ledgerService, entryPointSelector, ratingCalculator,
                walker, configuration);
    }

    /** The fully wired node; parameter order must match the {@link Iota} constructor. */
    @Singleton
    @Provides
    Iota provideIota(SpentAddressesProvider spentAddressesProvider, SpentAddressesService spentAddressesService,
            SnapshotProvider snapshotProvider, SnapshotService snapshotService,
            @Nullable LocalSnapshotManager localSnapshotManager, MilestoneService milestoneService,
            SeenMilestonesRetriever seenMilestonesRetriever, LedgerService ledgerService,
            @Nullable TransactionPruner transactionPruner, MilestoneSolidifier milestoneSolidifier,
            BundleValidator bundleValidator, Tangle tangle, TransactionValidator transactionValidator,
            TransactionRequester transactionRequester, NeighborRouter neighborRouter,
            TransactionProcessingPipeline transactionProcessingPipeline, TipsRequester tipsRequester,
            TipsViewModel tipsViewModel, TipSelector tipsSelector,
            LocalSnapshotsPersistenceProvider localSnapshotsDb, TransactionSolidifier transactionSolidifier) {
        return new Iota(configuration, spentAddressesProvider, spentAddressesService, snapshotProvider,
                snapshotService, localSnapshotManager, milestoneService, seenMilestonesRetriever, ledgerService,
                transactionPruner, milestoneSolidifier, bundleValidator, tangle, transactionValidator,
                transactionRequester, neighborRouter, transactionProcessingPipeline, tipsRequester, tipsViewModel,
                tipsSelector, localSnapshotsDb, transactionSolidifier);
    }

    /** IXI extension loader bound to the node instance. */
    @Singleton
    @Provides
    IXI provideIxi(Iota iota) {
        return new IXI(iota);
    }

    /** The HTTP API facade over all node services. */
    @Singleton
    @Provides
    API provideApi(IXI ixi, TransactionRequester transactionRequester,
            SpentAddressesService spentAddressesService, Tangle tangle, BundleValidator bundleValidator,
            SnapshotProvider snapshotProvider, LedgerService ledgerService, NeighborRouter neighborRouter,
            TipSelector tipsSelector, TipsViewModel tipsViewModel, TransactionValidator transactionValidator,
            MilestoneSolidifier milestoneSolidifier, TransactionProcessingPipeline txPipeline,
            TransactionSolidifier transactionSolidifier) {
        return new API(configuration, ixi, transactionRequester, spentAddressesService, tangle, bundleValidator,
                snapshotProvider, ledgerService, neighborRouter, tipsSelector, tipsViewModel, transactionValidator,
                milestoneSolidifier, txPipeline, transactionSolidifier);
    }

    /** RocksDB-backed store for local snapshots and spent addresses (fixed 1000 cache size). */
    @Singleton
    @Provides
    LocalSnapshotsPersistenceProvider provideLocalSnapshotsPersistenceProvider(){
        return new LocalSnapshotsPersistenceProvider(new RocksDBPersistenceProvider(
                configuration.getLocalSnapshotsDbPath(), configuration.getLocalSnapshotsDbLogPath(),
                configuration.getDbConfigFile(), 1000, LocalSnapshotsPersistenceProvider.COLUMN_FAMILIES, null));
    }

    @Override
    protected void configure() {
        // beans that only need a default constructor
        bind(Tangle.class).asEagerSingleton();
        bind(BundleValidator.class).asEagerSingleton();
        bind(TipsViewModel.class).asEagerSingleton();
    }
}
51.570093
269
java
iri
iri-master/src/main/java/com/iota/iri/OsVariants.java
package com.iota.iri; public enum OsVariants { Windows, Unix; }
73
9.571429
24
java
iri
iri-master/src/main/java/com/iota/iri/SignedFiles.java
package com.iota.iri; import com.iota.iri.crypto.Curl; import com.iota.iri.crypto.ISS; import com.iota.iri.crypto.Sponge; import com.iota.iri.crypto.SpongeFactory; import com.iota.iri.utils.Converter; import org.apache.commons.lang3.ArrayUtils; import java.io.*; import java.util.Arrays; public class SignedFiles { public static boolean isFileSignatureValid(String filename, String signatureFilename, String publicKey, int depth, int index) throws IOException { byte[] signature = digestFile(filename, SpongeFactory.create(SpongeFactory.Mode.KERL)); return validateSignature(signatureFilename, publicKey, depth, index, signature); } private static boolean validateSignature(String signatureFilename, String publicKey, int depth, int index, byte[] digest) throws IOException { //validate signature SpongeFactory.Mode mode = SpongeFactory.Mode.CURLP81; byte[] digests = new byte[0]; byte[] bundle = ISS.normalizedBundle(digest); byte[] root; int i; try (InputStream inputStream = SignedFiles.class.getResourceAsStream(signatureFilename); BufferedReader reader = new BufferedReader((inputStream == null) ? 
new FileReader(signatureFilename) : new InputStreamReader(inputStream))) { String line; for (i = 0; i < 3 && (line = reader.readLine()) != null; i++) { byte[] lineTrits = Converter.allocateTritsForTrytes(line.length()); Converter.trits(line, lineTrits, 0); byte[] normalizedBundleFragment = Arrays.copyOfRange(bundle, i * ISS.NORMALIZED_FRAGMENT_LENGTH, (i + 1) * ISS.NORMALIZED_FRAGMENT_LENGTH); byte[] issDigest = ISS.digest(mode, normalizedBundleFragment, lineTrits); digests = ArrayUtils.addAll(digests, issDigest); } if ((line = reader.readLine()) != null) { byte[] lineTrits = Converter.allocateTritsForTrytes(line.length()); Converter.trits(line, lineTrits, 0); root = ISS.getMerkleRoot(mode, ISS.address(mode, digests), lineTrits, 0, index, depth); } else { root = ISS.address(mode, digests); } byte[] pubkeyTrits = Converter.allocateTritsForTrytes(publicKey.length()); Converter.trits(publicKey, pubkeyTrits, 0); return Arrays.equals(pubkeyTrits, root); // valid } } private static byte[] digestFile(String filename, Sponge curl) throws IOException { try (InputStream inputStream = SignedFiles.class.getResourceAsStream(filename); BufferedReader reader = new BufferedReader((inputStream == null) ? new FileReader(filename) : new InputStreamReader(inputStream))) { byte[] buffer = new byte[Curl.HASH_LENGTH * 3]; reader.lines().forEach(line -> { String trytes = Converter.asciiToTrytes(line); // can return a null if (trytes == null) { throw new IllegalArgumentException("TRYTES IS NULL. INPUT= '" + line + "'"); } Converter.trits(trytes, buffer, 0); curl.absorb(buffer, 0, buffer.length); Arrays.fill(buffer, (byte) 0); }); byte[] signature = new byte[Curl.HASH_LENGTH]; curl.squeeze(signature, 0, Curl.HASH_LENGTH); return signature; } catch (UncheckedIOException e) { throw e.getCause(); } } }
3,560
44.075949
155
java
iri
iri-master/src/main/java/com/iota/iri/conf/APIConfig.java
package com.iota.iri.conf; import java.net.InetAddress; import java.util.List; /** * Configurations for node API */ public interface APIConfig extends Config { /** * Default Value: {@value BaseIotaConfig.Defaults#PORT} * * @return {@value APIConfig.Descriptions#PORT} */ int getPort(); /** * Default Value: {@value BaseIotaConfig.Defaults#API_HOST} * * @return {@value APIConfig.Descriptions#API_HOST} */ String getApiHost(); /** * Default Value: {@link BaseIotaConfig.Defaults#REMOTE_LIMIT_API} * * @return {@value APIConfig.Descriptions#REMOTE_LIMIT_API} */ List<String> getRemoteLimitApi(); /** * Default Value: {@link BaseIotaConfig.Defaults#REMOTE_TRUSTED_API_HOSTS} * @return {@value Descriptions#REMOTE_TRUSTED_API_HOSTS} */ List<InetAddress> getRemoteTrustedApiHosts(); /** * Default Value: {@value BaseIotaConfig.Defaults#MAX_FIND_TRANSACTIONS} * * @return {@value APIConfig.Descriptions#MAX_FIND_TRANSACTIONS} */ int getMaxFindTransactions(); /** * Default Value: {@value BaseIotaConfig.Defaults#MAX_REQUESTS_LIST} * * @return {@value APIConfig.Descriptions#MAX_REQUESTS_LIST} */ int getMaxRequestsList(); /** * Default Value: {@value BaseIotaConfig.Defaults#MAX_GET_TRYTES} * * @return {@value APIConfig.Descriptions#MAX_GET_TRYTES} */ int getMaxGetTrytes(); /** * Default Value: {@value BaseIotaConfig.Defaults#MAX_BODY_LENGTH} * * @return {@value APIConfig.Descriptions#MAX_BODY_LENGTH} */ int getMaxBodyLength(); /** * Default Value: {@value BaseIotaConfig.Defaults#REMOTE_AUTH} * * @return {@value APIConfig.Descriptions#REMOTE_AUTH} */ String getRemoteAuth(); /** * These descriptions are used by JCommander when you enter <code>java iri.jar --help</code> at the command line. */ interface Descriptions { String PORT = "The port that will be used by the API."; String API_HOST = "The host on which the API will listen to. 
Set to 0.0.0.0 to accept any host."; String REMOTE_LIMIT_API = "Commands that should be ignored by API."; String REMOTE_TRUSTED_API_HOSTS = "Open the API interface to defined hosts. You can specify multiple hosts in a comma separated list \"--remote-trusted-api-hosts 192.168.0.55,10.0.0.10\". You must also provide the \"--remote\" parameter. Warning: \"--remote-limit-api\" will have no effect for these hosts."; String REMOTE_AUTH = "A string in the form of <user>:<password>. Used to access the API. You can provide a clear text or an hashed password."; String MAX_FIND_TRANSACTIONS = "The maximal number of transactions that may be returned by the \"findTransactions\" API call. If the number of transactions found exceeds this number an error will be returned."; String MAX_REQUESTS_LIST = "The maximal number of parameters one can place in an API call. If the number parameters exceeds this number an error will be returned"; String MAX_GET_TRYTES = "The maximal number of trytes that may be returned by the \"getTrytes\" API call. If the number of transactions found exceeds this number an error will be returned."; String MAX_BODY_LENGTH = "The maximal number of characters the body of an API call may hold. If a request body length exceeds this number an error will be returned."; String REMOTE = "Open the API interface to any host. Equivalent to \"--api-host 0.0.0.0\""; } }
3,590
38.461538
316
java
iri
iri-master/src/main/java/com/iota/iri/conf/BaseIotaConfig.java
package com.iota.iri.conf; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; import org.apache.commons.lang3.ArrayUtils; import com.beust.jcommander.JCommander; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.iota.iri.crypto.SpongeFactory; import com.iota.iri.model.Hash; import com.iota.iri.model.HashFactory; import com.iota.iri.utils.IotaUtils; /** Note: the fields in this class are being deserialized from Jackson so they must follow Java Bean convention. Meaning that every field must have a getter that is prefixed with `get` unless it is a boolean and then it should be prefixed with `is`. */ public abstract class BaseIotaConfig implements IotaConfig { public static final String SPLIT_STRING_TO_LIST_REGEX = ",| "; private boolean help; private boolean testnet = false; //API protected int port = Defaults.PORT; protected String apiHost = Defaults.API_HOST; protected List<String> remoteLimitApi = Defaults.REMOTE_LIMIT_API; protected List<InetAddress> remoteTrustedApiHosts = Defaults.REMOTE_LIMIT_API_HOSTS; protected int maxFindTransactions = Defaults.MAX_FIND_TRANSACTIONS; protected int maxRequestsList = Defaults.MAX_REQUESTS_LIST; protected int maxGetTrytes = Defaults.MAX_GET_TRYTES; protected int maxBodyLength = Defaults.MAX_BODY_LENGTH; protected String remoteAuth = Defaults.REMOTE_AUTH; //We don't have a REMOTE config but we have a remote flag. 
We must add a field for JCommander private boolean remote; //Network protected String neighboringSocketAddress = Defaults.NEIGHBORING_SOCKET_ADDRESS; protected int neighboringSocketPort = Defaults.NEIGHBORING_SOCKET_PORT; protected int reconnectAttemptIntervalSeconds = Defaults.RECONNECT_ATTEMPT_INTERVAL_SECONDS; protected boolean autoTetheringEnabled = Defaults.AUTO_TETHERING_ENABLED; protected double pDropCacheEntry = Defaults.P_DROP_CACHE_ENTRY; protected int sendLimit = Defaults.SEND_LIMIT; protected int maxNeighbors = Defaults.MAX_NEIGHBORS; protected boolean dnsRefresherEnabled = Defaults.DNS_REFRESHER_ENABLED; protected boolean dnsResolutionEnabled = Defaults.DNS_RESOLUTION_ENABLED; protected List<String> neighbors = Collections.EMPTY_LIST; //IXI protected String ixiDir = Defaults.IXI_DIR; //DB protected String dbPath = Defaults.DB_PATH; protected String dbLogPath = Defaults.DB_LOG_PATH; protected String dbConfigFile = Defaults.DB_CONFIG_FILE; protected int dbCacheSize = Defaults.DB_CACHE_SIZE; //KB protected String mainDb = Defaults.MAIN_DB; protected boolean revalidate = Defaults.REVALIDATE; protected boolean rescanDb = Defaults.RESCAN_DB; //Protocol protected double pSendMilestone = Defaults.P_SEND_MILESTONE; //ZMQ protected boolean zmqEnableTcp = Defaults.ZMQ_ENABLE_TCP; protected boolean zmqEnableIpc = Defaults.ZMQ_ENABLE_IPC; protected int zmqPort = Defaults.ZMQ_PORT; protected int zmqThreads = Defaults.ZMQ_THREADS; protected String zmqIpc = Defaults.ZMQ_IPC; protected int qSizeNode = Defaults.QUEUE_SIZE; protected int cacheSizeBytes = Defaults.CACHE_SIZE_BYTES; /** * @deprecated This field was replaced by {@link #zmqEnableTcp} and {@link #zmqEnableIpc}. It is only needed * for backward compatibility to --zmq-enabled parameter with JCommander. 
*/ @Deprecated private boolean zmqEnabled; //Tip Selection protected int maxDepth = Defaults.MAX_DEPTH; protected double alpha = Defaults.ALPHA; protected int tipSelectionTimeoutSec = Defaults.TIP_SELECTION_TIMEOUT_SEC; private int maxAnalyzedTransactions = Defaults.BELOW_MAX_DEPTH_TRANSACTION_LIMIT; //PearlDiver protected int powThreads = Defaults.POW_THREADS; //Snapshot protected boolean localSnapshotsEnabled = Defaults.LOCAL_SNAPSHOTS_ENABLED; protected boolean localSnapshotsPruningEnabled = Defaults.LOCAL_SNAPSHOTS_PRUNING_ENABLED; protected int localSnapshotsPruningDelay = Defaults.LOCAL_SNAPSHOTS_PRUNING_DELAY; protected int localSnapshotsIntervalSynced = Defaults.LOCAL_SNAPSHOTS_INTERVAL_SYNCED; protected int localSnapshotsIntervalUnsynced = Defaults.LOCAL_SNAPSHOTS_INTERVAL_UNSYNCED; protected String localSnapshotsDbMaxSize = Defaults.LOCAL_SNAPSHOTS_DB_MAX_SIZE; //Human readable protected int localSnapshotsDepth = Defaults.LOCAL_SNAPSHOTS_DEPTH; protected String localSnapshotsDbPath = Defaults.LOCAL_SNAPSHOTS_DB_PATH; protected String localSnapshotsDbLogPath = Defaults.LOCAL_SNAPSHOTS_DB_LOG_PATH; //Solidification protected boolean printSyncProgressEnabled = Defaults.PRINT_SYNC_PROGRESS_ENABLED; public BaseIotaConfig() { //empty constructor } @Override public JCommander parseConfigFromArgs(String[] args) throws ParameterException { //One can invoke help via INI file (feature/bug) so we always create JCommander even if args is empty JCommander jCommander = JCommander.newBuilder() .addObject(this) //This is in order to enable the `--conf` option .acceptUnknownOptions(true) .allowParameterOverwriting(true) //This is the first line of JCommander Usage .programName("java -jar iri-" + IotaUtils.getIriVersion() + ".jar") .build(); if (ArrayUtils.isNotEmpty(args)) { jCommander.parse(args); } return jCommander; } @Override public boolean isHelp() { return help; } @Override public boolean isTestnet() { return testnet; } @JsonIgnore @Parameter(names = 
{Config.TESTNET_FLAG}, description = Config.Descriptions.TESTNET, arity = 1) protected void setTestnet(boolean testnet) { this.testnet = testnet; } @JsonProperty @Parameter(names = {"--help", "-h"}, help = true, hidden = true) public void setHelp(boolean help) { this.help = help; } @Override public int getPort() { return port; } @JsonProperty @Parameter(names = {"--port", "-p"}, description = APIConfig.Descriptions.PORT) public void setPort(int port) { this.port = port; } @Override public String getApiHost() { if (remote) { return "0.0.0.0"; } return apiHost; } @JsonProperty @Parameter(names = {"--api-host"}, description = APIConfig.Descriptions.API_HOST) protected void setApiHost(String apiHost) { this.apiHost = apiHost; } @JsonIgnore @Parameter(names = {"--remote"}, description = APIConfig.Descriptions.REMOTE, arity = 1) protected void setRemote(boolean remote) { this.remote = remote; } @Override public List<String> getRemoteLimitApi() { return remoteLimitApi; } @JsonProperty @Parameter(names = {"--remote-limit-api"}, description = APIConfig.Descriptions.REMOTE_LIMIT_API) protected void setRemoteLimitApi(String remoteLimitApi) { this.remoteLimitApi = IotaUtils.splitStringToImmutableList(remoteLimitApi, SPLIT_STRING_TO_LIST_REGEX); } @Override public List<InetAddress> getRemoteTrustedApiHosts() { return remoteTrustedApiHosts; } @JsonProperty @Parameter(names = {"--remote-trusted-api-hosts"}, description = APIConfig.Descriptions.REMOTE_TRUSTED_API_HOSTS) public void setRemoteTrustedApiHosts(String remoteTrustedApiHosts) { List<String> addresses = IotaUtils.splitStringToImmutableList(remoteTrustedApiHosts, SPLIT_STRING_TO_LIST_REGEX); List<InetAddress> inetAddresses = addresses.stream().map(host -> { try { return InetAddress.getByName(host.trim()); } catch (UnknownHostException e) { throw new ParameterException("Invalid value for --remote-trusted-api-hosts address: ", e); } }).collect(Collectors.toList()); // always make sure that localhost exists as trusted host 
if(!inetAddresses.contains(Defaults.REMOTE_TRUSTED_API_HOSTS)) { inetAddresses.add(Defaults.REMOTE_TRUSTED_API_HOSTS); } this.remoteTrustedApiHosts = Collections.unmodifiableList(inetAddresses); } @Override public int getMaxFindTransactions() { return maxFindTransactions; } @JsonProperty @Parameter(names = {"--max-find-transactions"}, description = APIConfig.Descriptions.MAX_FIND_TRANSACTIONS) protected void setMaxFindTransactions(int maxFindTransactions) { this.maxFindTransactions = maxFindTransactions; } @Override public int getMaxRequestsList() { return maxRequestsList; } @JsonProperty @Parameter(names = {"--max-requests-list"}, description = APIConfig.Descriptions.MAX_REQUESTS_LIST) protected void setMaxRequestsList(int maxRequestsList) { this.maxRequestsList = maxRequestsList; } @Override public int getMaxGetTrytes() { return maxGetTrytes; } @JsonProperty @Parameter(names = {"--max-get-trytes"}, description = APIConfig.Descriptions.MAX_GET_TRYTES) protected void setMaxGetTrytes(int maxGetTrytes) { this.maxGetTrytes = maxGetTrytes; } @Override public int getMaxBodyLength() { return maxBodyLength; } @JsonProperty @Parameter(names = {"--max-body-length"}, description = APIConfig.Descriptions.MAX_BODY_LENGTH) protected void setMaxBodyLength(int maxBodyLength) { this.maxBodyLength = maxBodyLength; } @Override public String getRemoteAuth() { return remoteAuth; } @JsonProperty @Parameter(names = {"--remote-auth"}, description = APIConfig.Descriptions.REMOTE_AUTH) protected void setRemoteAuth(String remoteAuth) { this.remoteAuth = remoteAuth; } @JsonProperty @Parameter(names = {"--neighboring-socket-address"}, description = NetworkConfig.Descriptions.NEIGHBORING_SOCKET_ADDRESS) public void setNeighboringSocketAddress(String neighboringSocketAddress) { this.neighboringSocketAddress = neighboringSocketAddress; } @Override public String getNeighboringSocketAddress() { return neighboringSocketAddress; } @JsonProperty @Parameter(names = {"--neighboring-socket-port", "-t"}, 
description = NetworkConfig.Descriptions.NEIGHBORING_SOCKET_PORT) public void setNeighboringSocketPort(int neighboringSocketPort) { this.neighboringSocketPort = neighboringSocketPort; } @Override public int getNeighboringSocketPort() { return neighboringSocketPort; } @Override public int getReconnectAttemptIntervalSeconds() { return reconnectAttemptIntervalSeconds; } @JsonProperty @Parameter(names = {"--reconnect-attempt-interval-seconds"}, description = NetworkConfig.Descriptions.RECONNECT_ATTEMPT_INTERVAL_SECONDS) protected void setReconnectAttemptIntervalSeconds(int reconnectAttemptIntervalSeconds) { this.reconnectAttemptIntervalSeconds = reconnectAttemptIntervalSeconds; } @Override public boolean isAutoTetheringEnabled() { return autoTetheringEnabled; } @JsonProperty @Parameter(names = {"--auto-tethering"}, description = NetworkConfig.Descriptions.AUTO_TETHERING_ENABLED, arity = 1) protected void setAutoTetheringEnabled(boolean autoTetheringEnabled) { this.autoTetheringEnabled = autoTetheringEnabled; } @Override public int getSendLimit() { return sendLimit; } @JsonProperty @Parameter(names = {"--send-limit"}, description = NetworkConfig.Descriptions.SEND_LIMIT) protected void setSendLimit(int sendLimit) { this.sendLimit = sendLimit; } @Override public int getMaxNeighbors() { return maxNeighbors; } @JsonProperty @Parameter(names = {"--max-neighbors"}, description = NetworkConfig.Descriptions.MAX_NEIGHBORS) protected void setMaxNeighbors(int maxNeighbors) { this.maxNeighbors = maxNeighbors; } @Override public boolean isDnsRefresherEnabled() { return dnsRefresherEnabled; } @JsonProperty @Parameter(names = {"--dns-refresher"}, description = NetworkConfig.Descriptions.DNS_REFRESHER_ENABLED, arity = 1) protected void setDnsRefresherEnabled(boolean dnsRefresherEnabled) { this.dnsRefresherEnabled = dnsRefresherEnabled; } @Override public boolean isDnsResolutionEnabled() { return dnsResolutionEnabled; } @JsonProperty @Parameter(names = {"--dns-resolution"}, description 
= NetworkConfig.Descriptions.DNS_RESOLUTION_ENABLED, arity = 1) protected void setDnsResolutionEnabled(boolean dnsResolutionEnabled) { this.dnsResolutionEnabled = dnsResolutionEnabled; } @Override public List<String> getNeighbors() { return neighbors; } @JsonProperty @Parameter(names = {"-n", "--neighbors"}, description = NetworkConfig.Descriptions.NEIGHBORS) protected void setNeighbors(String neighbors) { this.neighbors = IotaUtils.splitStringToImmutableList(neighbors, SPLIT_STRING_TO_LIST_REGEX); } @Override public String getIxiDir() { return ixiDir; } @JsonProperty @Parameter(names = {"--ixi-dir"}, description = IXIConfig.Descriptions.IXI_DIR) protected void setIxiDir(String ixiDir) { this.ixiDir = ixiDir; } @Override public String getDbPath() { return dbPath; } @JsonProperty @Parameter(names = {"--db-path"}, description = DbConfig.Descriptions.DB_PATH) protected void setDbPath(String dbPath) { this.dbPath = dbPath; } @Override public String getDbLogPath() { return dbLogPath; } @JsonProperty @Parameter(names = {"--db-log-path"}, description = DbConfig.Descriptions.DB_LOG_PATH) protected void setDbLogPath(String dbLogPath) { this.dbLogPath = dbLogPath; } @Override public String getDbConfigFile() { return dbConfigFile; } @JsonProperty @Parameter(names = {"--db-config-file"}, description = DbConfig.Descriptions.DB_CONFIG_FILE) protected void setDbConfigFile(String dbConfigFile) { this.dbConfigFile = dbConfigFile; } @Override public int getDbCacheSize() { return dbCacheSize; } @JsonProperty @Parameter(names = {"--db-cache-size"}, description = DbConfig.Descriptions.DB_CACHE_SIZE) protected void setDbCacheSize(int dbCacheSize) { this.dbCacheSize = dbCacheSize; } @Override public String getMainDb() { return mainDb; } @JsonProperty @Parameter(names = {"--db"}, description = DbConfig.Descriptions.MAIN_DB) protected void setMainDb(String mainDb) { this.mainDb = mainDb; } @Override public boolean isRevalidate() { return revalidate; } @JsonProperty @Parameter(names = 
{"--revalidate"}, description = DbConfig.Descriptions.REVALIDATE, arity = 1) protected void setRevalidate(boolean revalidate) { this.revalidate = revalidate; } @Override public boolean isRescanDb() { return rescanDb; } @JsonProperty @Parameter(names = {"--rescan"}, description = DbConfig.Descriptions.RESCAN_DB, arity = 1) protected void setRescanDb(boolean rescanDb) { this.rescanDb = rescanDb; } @Override public int getMwm() { return Defaults.MWM; } @Override public int getRequestHashSize() { return Defaults.REQUEST_HASH_SIZE; } @Override public double getpSendMilestone() { return pSendMilestone; } @JsonProperty @Parameter(names = {"--p-send-milestone"}, description = ProtocolConfig.Descriptions.P_SEND_MILESTONE) protected void setpSendMilestone(double pSendMilestone) { this.pSendMilestone = pSendMilestone; } @Override public boolean getLocalSnapshotsEnabled() { return this.localSnapshotsEnabled; } @JsonProperty @Parameter(names = {"--local-snapshots-enabled"}, description = SnapshotConfig.Descriptions.LOCAL_SNAPSHOTS_ENABLED, arity = 1) protected void setLocalSnapshotsEnabled(boolean localSnapshotsEnabled) { this.localSnapshotsEnabled = localSnapshotsEnabled; } @Override public boolean getLocalSnapshotsPruningEnabled() { return this.localSnapshotsEnabled && this.localSnapshotsPruningEnabled; } @JsonProperty @Parameter(names = {"--local-snapshots-pruning-enabled"}, description = SnapshotConfig.Descriptions.LOCAL_SNAPSHOTS_PRUNING_ENABLED, arity = 1) protected void setLocalSnapshotsPruningEnabled(boolean localSnapshotsPruningEnabled) { this.localSnapshotsPruningEnabled = localSnapshotsPruningEnabled; } @Override public int getLocalSnapshotsPruningDelay() { return this.localSnapshotsPruningDelay; } @JsonProperty @Parameter(names = {"--local-snapshots-pruning-delay"}, description = SnapshotConfig.Descriptions.LOCAL_SNAPSHOTS_PRUNING_DELAY) protected void setLocalSnapshotsPruningDelay(int localSnapshotsPruningDelay) { if (localSnapshotsPruningDelay < 
Defaults.LOCAL_SNAPSHOTS_PRUNING_DELAY_MIN) { throw new ParameterException("LOCAL_SNAPSHOTS_PRUNING_DELAY should be at least " + Defaults.LOCAL_SNAPSHOTS_PRUNING_DELAY_MIN + "(found " + localSnapshotsPruningDelay +")"); } this.localSnapshotsPruningDelay = localSnapshotsPruningDelay; } @Override public int getLocalSnapshotsIntervalSynced() { return this.localSnapshotsIntervalSynced; } @JsonProperty @Parameter(names = {"--local-snapshots-interval-synced"}, description = SnapshotConfig.Descriptions.LOCAL_SNAPSHOTS_INTERVAL_SYNCED) protected void setLocalSnapshotsIntervalSynced(int localSnapshotsIntervalSynced) { if (localSnapshotsIntervalSynced < 1) { throw new ParameterException("LOCAL_SNAPSHOTS_INTERVAL_SYNCED should be at least 1 (found " + localSnapshotsIntervalSynced + ")"); } this.localSnapshotsIntervalSynced = localSnapshotsIntervalSynced; } @Override public int getLocalSnapshotsIntervalUnsynced() { return this.localSnapshotsIntervalUnsynced; } @JsonProperty @Parameter(names = {"--local-snapshots-interval-unsynced"}, description = SnapshotConfig.Descriptions.LOCAL_SNAPSHOTS_INTERVAL_UNSYNCED) protected void setLocalSnapshotsIntervalUnsynced(int localSnapshotsIntervalUnsynced) { if (localSnapshotsIntervalUnsynced < 1) { throw new ParameterException("LOCAL_SNAPSHOTS_INTERVAL_UNSYNCED should be at least 1 (found " + localSnapshotsIntervalUnsynced + ")"); } this.localSnapshotsIntervalUnsynced = localSnapshotsIntervalUnsynced; } @Override public int getLocalSnapshotsDepth() { return this.localSnapshotsDepth; } @JsonProperty @Parameter(names = {"--local-snapshots-depth"}, description = SnapshotConfig.Descriptions.LOCAL_SNAPSHOTS_DEPTH) protected void setLocalSnapshotsDepth(int localSnapshotsDepth) { if (localSnapshotsDepth < Defaults.LOCAL_SNAPSHOTS_DEPTH_MIN) { throw new ParameterException("LOCAL_SNAPSHOTS_DEPTH should be at least " + Defaults.LOCAL_SNAPSHOTS_DEPTH_MIN + "(found " + localSnapshotsDepth + ")"); } this.localSnapshotsDepth = localSnapshotsDepth; } 
@Override public String getLocalSnapshotsDbMaxSize() { return localSnapshotsDbMaxSize; } @JsonProperty @Parameter(names = {"--local-snapshots-db-max-size"}, description = SnapshotConfig.Descriptions.LOCAL_SNAPSHOTS_DB_MAX_SIZE) protected void setLocalSnapshotsDbMaxSize(String dbMaxSize) { this.localSnapshotsDbMaxSize = dbMaxSize; } @Override public long getSnapshotTime() { return Defaults.SNAPSHOT_TIME; } @Override public String getSnapshotFile() { return Defaults.SNAPSHOT_FILE; } @Override public String getSnapshotSignatureFile() { return Defaults.SNAPSHOT_SIGNATURE_FILE; } @Override public String getPreviousEpochSpentAddressesFiles() { return Defaults.PREVIOUS_EPOCHS_SPENT_ADDRESSES_FILE; } @Override public int getMilestoneStartIndex() { return Defaults.MILESTONE_START_INDEX; } @Override public int getMaxMilestoneIndex() { return Defaults.MAX_MILESTONE_INDEX; } @Override public int getNumberOfKeysInMilestone() { return Defaults.NUMBER_OF_KEYS_IN_A_MILESTONE; } @Override public String getLocalSnapshotsDbPath() { return localSnapshotsDbPath; } @JsonProperty @Parameter(names = {"--localsnapshots-db-path"}, description = SnapshotConfig.Descriptions.LOCAL_SNAPSHOTS_DB_PATH) protected void setLocalSnapshotsDbPath(String localSnapshotsDbPath) { this.localSnapshotsDbPath = localSnapshotsDbPath; } @Override public String getLocalSnapshotsDbLogPath() { return localSnapshotsDbLogPath; } @JsonProperty @Parameter(names = {"--localsnapshots-db-log-path"}, description = SnapshotConfig.Descriptions.LOCAL_SNAPSHOTS_DB_LOG_PATH) protected void setLocalSnapshotsDbLogPath(String localSnapshotsDbLogPath) { this.localSnapshotsDbLogPath = localSnapshotsDbLogPath; } /** * Checks if ZMQ is enabled. * @return true if zmqEnableTcp or zmqEnableIpc is set. */ @Override public boolean isZmqEnabled() { return zmqEnableTcp || zmqEnableIpc; } /** * Activates ZMQ to listen on TCP and IPC. * @deprecated Use {@link #setZmqEnableTcp(boolean) and/or {@link #setZmqEnableIpc(boolean)}} instead. 
* @param zmqEnabled true if ZMQ should listen in TCP and IPC.
     */
    @Deprecated
    @JsonProperty
    @Parameter(names = "--zmq-enabled", description = ZMQConfig.Descriptions.ZMQ_ENABLED, arity = 1)
    protected void setZmqEnabled(boolean zmqEnabled) {
        // Legacy switch: flips both ZMQ transports at once. Superseded by the
        // separate --zmq-enable-tcp / --zmq-enable-ipc options below.
        this.zmqEnableTcp = zmqEnabled;
        this.zmqEnableIpc = zmqEnabled;
    }

    @Override
    public boolean isZmqEnableTcp() {
        return zmqEnableTcp;
    }

    /** Enables/disables ZMQ publishing over TCP (CLI: --zmq-enable-tcp). */
    @JsonProperty
    @Parameter(names = "--zmq-enable-tcp", description = ZMQConfig.Descriptions.ZMQ_ENABLE_TCP, arity = 1)
    public void setZmqEnableTcp(boolean zmqEnableTcp) {
        this.zmqEnableTcp = zmqEnableTcp;
    }

    @Override
    public boolean isZmqEnableIpc() {
        return zmqEnableIpc;
    }

    /** Enables/disables ZMQ publishing over IPC (CLI: --zmq-enable-ipc). */
    @JsonProperty
    @Parameter(names = "--zmq-enable-ipc", description = ZMQConfig.Descriptions.ZMQ_ENABLE_IPC, arity = 1)
    public void setZmqEnableIpc(boolean zmqEnableIpc) {
        this.zmqEnableIpc = zmqEnableIpc;
    }

    @Override
    public int getZmqPort() {
        return zmqPort;
    }

    /** Sets the ZMQ TCP port (CLI: --zmq-port). */
    @JsonProperty
    @Parameter(names = "--zmq-port", description = ZMQConfig.Descriptions.ZMQ_PORT)
    protected void setZmqPort(int zmqPort) {
        this.zmqPort = zmqPort;
        this.zmqEnableTcp = true; // side effect: supplying a port implicitly enables TCP publishing
    }

    @Override
    public int getZmqThreads() {
        return zmqThreads;
    }

    /** Sets the number of ZMQ threads (CLI: --zmq-threads). */
    @JsonProperty
    @Parameter(names = "--zmq-threads", description = ZMQConfig.Descriptions.ZMQ_THREADS)
    protected void setZmqThreads(int zmqThreads) {
        this.zmqThreads = zmqThreads;
    }

    @Override
    public String getZmqIpc() {
        return zmqIpc;
    }

    /** Sets the ZMQ IPC endpoint (CLI: --zmq-ipc). */
    @JsonProperty
    @Parameter(names = "--zmq-ipc", description = ZMQConfig.Descriptions.ZMQ_IPC)
    protected void setZmqIpc(String zmqIpc) {
        this.zmqIpc = zmqIpc;
        this.zmqEnableIpc = true; // side effect: supplying an endpoint implicitly enables IPC publishing
    }

    @Override
    public int getqSizeNode() {
        return qSizeNode;
    }

    /** Sets the size of the network queues (CLI: --queue-size). */
    @JsonProperty
    @Parameter(names = "--queue-size", description = NetworkConfig.Descriptions.Q_SIZE_NODE)
    protected void setqSizeNode(int qSizeNode) {
        this.qSizeNode = qSizeNode;
    }

    @Override
    public double getpDropCacheEntry() {
        return pDropCacheEntry;
    }

    /** Sets the probability of dropping a network cache entry (CLI: --p-drop-cache). */
    @JsonProperty
    @Parameter(names = "--p-drop-cache", description = NetworkConfig.Descriptions.P_DROP_CACHE_ENTRY)
    protected void setpDropCacheEntry(double pDropCacheEntry) {
        this.pDropCacheEntry = pDropCacheEntry;
    }

    @Override
    public int getCacheSizeBytes() {
        return cacheSizeBytes;
    }

    /** Sets the network cache size in bytes (CLI: --cache-size). */
    @JsonProperty
    @Parameter(names = "--cache-size", description = NetworkConfig.Descriptions.CACHE_SIZE_BYTES)
    protected void setCacheSizeBytes(int cacheSizeBytes) {
        this.cacheSizeBytes = cacheSizeBytes;
    }

    // The coordinator parameters below are pinned to the mainnet Defaults here;
    // TestnetConfig overrides them with configurable values.
    @Override
    public Hash getCoordinator() {
        return Defaults.COORDINATOR;
    }

    @Override
    public int getCoordinatorSecurityLevel() {
        return Defaults.COORDINATOR_SECURITY_LEVEL;
    }

    @Override
    public SpongeFactory.Mode getCoordinatorSignatureMode() {
        return Defaults.COORDINATOR_SIGNATURE_MODE;
    }

    @Override
    public boolean isDontValidateTestnetMilestoneSig() {
        // Milestone signature validation can never be disabled in this base config.
        return false;
    }

    @Override
    public int getMaxDepth() {
        return maxDepth;
    }

    /** Sets the tip-selection max depth (CLI: --max-depth). */
    @JsonProperty
    @Parameter(names = "--max-depth", description = TipSelConfig.Descriptions.MAX_DEPTH)
    protected void setMaxDepth(int maxDepth) {
        this.maxDepth = maxDepth;
    }

    @Override
    public double getAlpha() {
        return alpha;
    }

    /** Sets the tip-selection alpha value (config key TIPSELECTION_ALPHA, CLI: --alpha). */
    @JsonProperty("TIPSELECTION_ALPHA")
    @Parameter(names = "--alpha", description = TipSelConfig.Descriptions.ALPHA)
    protected void setAlpha(double alpha) {
        this.alpha = alpha;
    }

    @Override
    public int getTipSelectionTimeoutSec() {
        return tipSelectionTimeoutSec;
    }

    /** Sets the tip-selection timeout in seconds (CLI: --tip-selection-timeout-sec). */
    @JsonProperty
    @Parameter(names = "--tip-selection-timeout-sec", description = TipSelConfig.Descriptions.TIP_SELECTION_TIMEOUT_SEC)
    protected void setTipSelectionTimeoutSec(int tipSelectionTimeoutSec) {
        this.tipSelectionTimeoutSec = tipSelectionTimeoutSec;
    }

    @Override
    public int getBelowMaxDepthTransactionLimit() {
        return maxAnalyzedTransactions;
    }

    /** Sets the max number of transactions analyzed for the below-max-depth check. */
    @JsonProperty
    @Parameter(names = "--max-analyzed-transactions", description = TipSelConfig.Descriptions.BELOW_MAX_DEPTH_TRANSACTION_LIMIT)
    protected void setBelowMaxDepthTransactionLimit(int maxAnalyzedTransactions) {
        this.maxAnalyzedTransactions = maxAnalyzedTransactions;
    }

    @Override
    public int getPowThreads() {
        return powThreads;
    }

    /** Sets the number of proof-of-work threads (CLI: --pow-threads). */
    @JsonProperty
    @Parameter(names = "--pow-threads", description = PearlDiverConfig.Descriptions.POW_THREADS)
    protected void setPowThreads(int powThreads) {
        this.powThreads = powThreads;
    }

    @Override
    public boolean isPrintSyncProgressEnabled() {
        return printSyncProgressEnabled;
    }

    /** Enables/disables printing of synchronization progress (CLI: --print-sync-progress). */
    @JsonProperty
    @Parameter(names = {"--print-sync-progress"}, description = SolidificationConfig.Descriptions.PRINT_SYNC_PROGRESS_ENABLED, arity = 1)
    protected void setPrintSyncProgressEnabled(boolean printSyncProgressEnabled) {
        this.printSyncProgressEnabled = printSyncProgressEnabled;
    }

    /**
     * Represents the default values primarily used by the {@link BaseIotaConfig} field initialisation.
     */
    public interface Defaults {
        // API
        int PORT = 14265;
        String API_HOST = "localhost";
        List<String> REMOTE_LIMIT_API = IotaUtils.createImmutableList("addNeighbors", "getNeighbors", "removeNeighbors", "attachToTangle", "interruptAttachingToTangle");
        InetAddress REMOTE_TRUSTED_API_HOSTS = InetAddress.getLoopbackAddress();
        List<InetAddress> REMOTE_LIMIT_API_HOSTS = IotaUtils.createImmutableList(REMOTE_TRUSTED_API_HOSTS);
        int MAX_FIND_TRANSACTIONS = 100_000;
        int MAX_REQUESTS_LIST = 1_000;
        int MAX_GET_TRYTES = 10_000;
        int MAX_BODY_LENGTH = 1_000_000;
        String REMOTE_AUTH = "";

        // Network
        String NEIGHBORING_SOCKET_ADDRESS = "0.0.0.0";
        int NEIGHBORING_SOCKET_PORT = 15600;
        int RECONNECT_ATTEMPT_INTERVAL_SECONDS = 60;
        boolean AUTO_TETHERING_ENABLED = false;
        int SEND_LIMIT = -1;
        int MAX_NEIGHBORS = 5;
        boolean DNS_REFRESHER_ENABLED = true;
        boolean DNS_RESOLUTION_ENABLED = true;

        // IXI
        String IXI_DIR = "ixi";

        // DB
        String DB_PATH = "mainnetdb";
        String DB_LOG_PATH = "mainnet.log";
        String DB_CONFIG_FILE = "rocksdb-config.properties";
        int DB_CACHE_SIZE = 100_000;
        String MAIN_DB = "rocksdb";
        boolean REVALIDATE = false;
        boolean RESCAN_DB = false;

        // Protocol
        double P_SEND_MILESTONE = 0.02d;
        int MWM = 14;
        int REQUEST_HASH_SIZE = 46;
        int QUEUE_SIZE = 1_000;
        double P_DROP_CACHE_ENTRY = 0.02d;
        int CACHE_SIZE_BYTES = 150_000;

        // ZMQ
        int ZMQ_THREADS = 1;
        boolean ZMQ_ENABLE_IPC = false;
        String ZMQ_IPC = "ipc://iri";
        boolean ZMQ_ENABLE_TCP = false;
        int ZMQ_PORT = 5556;

        // Tip selection
        int MAX_DEPTH = 15;
        double ALPHA = 0d;
        int TIP_SELECTION_TIMEOUT_SEC = 60;

        // PearlDiver
        int POW_THREADS = 0;

        // Coordinator
        Hash COORDINATOR = HashFactory.ADDRESS.create(
                "EQSAUZXULTTYZCLNJNTXQTQHOMOFZERHTCGTXOLTVAHKSA9OGAZDEKECURBRIXIJWNPFCQIOVFVVXJVD9");
        int COORDINATOR_SECURITY_LEVEL = 2;
        SpongeFactory.Mode COORDINATOR_SIGNATURE_MODE = SpongeFactory.Mode.KERL;
        int NUMBER_OF_KEYS_IN_A_MILESTONE = 23;
        // Derived value: a Merkle tree of depth 23 bounds the milestone index at 2^23.
        int MAX_MILESTONE_INDEX = 1 << NUMBER_OF_KEYS_IN_A_MILESTONE;

        // Snapshot
        boolean LOCAL_SNAPSHOTS_ENABLED = true;
        boolean LOCAL_SNAPSHOTS_PRUNING_ENABLED = false;
        String LOCAL_SNAPSHOTS_DB_MAX_SIZE = "-1";
        int LOCAL_SNAPSHOTS_PRUNING_DELAY = 40000;
        int LOCAL_SNAPSHOTS_PRUNING_DELAY_MIN = 10000;
        int LOCAL_SNAPSHOTS_INTERVAL_SYNCED = 10;
        int LOCAL_SNAPSHOTS_INTERVAL_UNSYNCED = 1000;
        int LOCAL_SNAPSHOTS_DEPTH = 100;
        int LOCAL_SNAPSHOTS_DEPTH_MIN = 100;
        String LOCAL_SNAPSHOTS_DB_PATH = "localsnapshots-db";
        String LOCAL_SNAPSHOTS_DB_LOG_PATH = "localsnapshots-log";
        String SNAPSHOT_FILE = "/snapshotMainnet.txt";
        String SNAPSHOT_SIGNATURE_FILE = "/snapshotMainnet.sig";
        String PREVIOUS_EPOCHS_SPENT_ADDRESSES_FILE = "/previousEpochsSpentAddresses1.txt /previousEpochsSpentAddresses2.txt " + "/previousEpochsSpentAddresses3.txt";
        long SNAPSHOT_TIME = 1554904800;
        int MILESTONE_START_INDEX = 1050000;
        int BELOW_MAX_DEPTH_TRANSACTION_LIMIT = 20_000;

        // Solidification
        boolean PRINT_SYNC_PROGRESS_ENABLED = true;
    }
}
31,368
32.265111
169
java
iri
iri-master/src/main/java/com/iota/iri/conf/Config.java
package com.iota.iri.conf;

/**
 * General configuration parameters that every module in IRI needs.
 */
public interface Config {

    /** Command line flag that selects testnet mode. */
    String TESTNET_FLAG = "--testnet";

    /**
     * @return {@value Descriptions#TESTNET}
     */
    boolean isTestnet();

    /** Help-text snippets for the parameters declared in this interface. */
    interface Descriptions {
        String TESTNET = "Start in testnet mode.";
    }

    /** Shared building blocks reused by the Descriptions of other config interfaces. */
    class DescriptionHelper {
        protected static final String PROB_OF = "A number between 0 and 1 that represents the probability of ";
    }
}
501
19.916667
112
java
iri
iri-master/src/main/java/com/iota/iri/conf/ConfigFactory.java
package com.iota.iri.conf;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.iota.iri.conf.deserializers.CustomBoolDeserializer;
import com.iota.iri.conf.deserializers.CustomStringDeserializer;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

/**
 * Creates the global {@link IotaConfig} object with iri specific settings.
 */
public class ConfigFactory {

    /**
     * Creates the {@link IotaConfig} object for {@link TestnetConfig} or {@link MainnetConfig}.
     *
     * @param isTestnet true if {@link TestnetConfig} should be created.
     * @return the {@link IotaConfig} configuration.
     */
    public static IotaConfig createIotaConfig(boolean isTestnet) {
        return isTestnet ? new TestnetConfig() : new MainnetConfig();
    }

    /**
     * Creates the {@link IotaConfig} object for {@link TestnetConfig} or {@link MainnetConfig} from a
     * config file. The config file is parsed for <code>TESTNET=true</code>; if it is found (or if
     * {@code testnet} is already set) a {@link TestnetConfig} object is created, else a
     * {@link MainnetConfig}.
     *
     * @param configFile A property file with configuration options.
     * @param testnet When true a {@link TestnetConfig} is created regardless of the file contents.
     * @return the populated {@link IotaConfig} configuration.
     *
     * @throws IOException When the config file could not be found or read.
     */
    public static IotaConfig createFromFile(File configFile, boolean testnet) throws IOException {
        Properties props = new Properties();
        // Release the file handle as soon as the properties are loaded.
        try (FileInputStream confStream = new FileInputStream(configFile)) {
            props.load(confStream);
        }

        boolean isTestnet = testnet || Boolean.parseBoolean(props.getProperty("TESTNET", "false"));
        Class<? extends IotaConfig> iotaConfigClass = isTestnet ? TestnetConfig.class : MainnetConfig.class;
        return createPropertiesMapper().convertValue(props, iotaConfigClass);
    }

    /**
     * Builds the Jackson mapper used to bind a {@link Properties} instance onto a config class:
     * case-insensitive, snake-case property names, single values accepted as arrays, lenient about
     * unknown keys, strict about nulls for primitives, with custom boolean/string deserializers.
     */
    private static ObjectMapper createPropertiesMapper() {
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true);
        objectMapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true);
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        objectMapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, true);
        objectMapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);

        SimpleModule booleanParser = new SimpleModule("BooleanParser");
        booleanParser.addDeserializer(Boolean.TYPE, new CustomBoolDeserializer());
        objectMapper.registerModule(booleanParser);

        SimpleModule stringParser = new SimpleModule("StringParser");
        stringParser.addDeserializer(String.class, new CustomStringDeserializer());
        objectMapper.registerModule(stringParser);

        return objectMapper;
    }
}
3,388
42.448718
119
java
iri
iri-master/src/main/java/com/iota/iri/conf/ConsensusConfig.java
package com.iota.iri.conf;

/**
 * A configuration for all configuration concerned with achieving consensus on the ledger state
 * across different nodes.
 *
 * @implNote It currently extends two other interfaces. This has been done due to lack of
 *           separation of concerns in the current code base and will be changed in the future.
 */
public interface ConsensusConfig extends SnapshotConfig, MilestoneConfig {
}
410
36.363636
118
java
iri
iri-master/src/main/java/com/iota/iri/conf/DbConfig.java
package com.iota.iri.conf;

/**
 * Configurations for tangle database.
 */
public interface DbConfig extends Config {

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#DB_PATH}
     *
     * @return {@value DbConfig.Descriptions#DB_PATH}
     */
    String getDbPath();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#DB_LOG_PATH}
     *
     * @return {@value DbConfig.Descriptions#DB_LOG_PATH}
     */
    String getDbLogPath();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#DB_CONFIG_FILE}
     *
     * @return {@value DbConfig.Descriptions#DB_CONFIG_FILE}
     */
    String getDbConfigFile();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#DB_CACHE_SIZE}
     *
     * @return {@value DbConfig.Descriptions#DB_CACHE_SIZE}
     */
    int getDbCacheSize();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#MAIN_DB}
     *
     * @return {@value DbConfig.Descriptions#MAIN_DB}
     */
    String getMainDb();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#REVALIDATE}
     *
     * @return {@value DbConfig.Descriptions#REVALIDATE}
     */
    boolean isRevalidate();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#RESCAN_DB}
     *
     * @return {@value DbConfig.Descriptions#RESCAN_DB}
     */
    boolean isRescanDb();

    /** Help-text snippets for the database parameters. */
    interface Descriptions {
        String DB_PATH = "The folder where the DB saves its data.";
        String DB_LOG_PATH = "The folder where the DB logs info";
        String DB_CACHE_SIZE = "The size of the DB cache in KB";
        String MAIN_DB = "The DB engine used to store the transactions. Currently only RocksDB is supported.";
        String REVALIDATE = "Reload from the db data about confirmed transaction (milestones), state of the ledger, " +
                "and transaction metadata.";
        String RESCAN_DB = "Rescan all transaction metadata (Approvees, Bundles, and Tags)";
        String DB_CONFIG_FILE = "The location of the RocksDB configuration file";
    }
}
2,027
28.391304
119
java
iri
iri-master/src/main/java/com/iota/iri/conf/IXIConfig.java
package com.iota.iri.conf;

/**
 * Configurations for IXI modules.
 */
public interface IXIConfig extends Config {

    // NOTE(review): this constant duplicates BaseIotaConfig.Defaults#IXI_DIR;
    // kept here because it is part of the public interface.
    String IXI_DIR = "ixi";

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#IXI_DIR}
     *
     * @return {@value IXIConfig.Descriptions#IXI_DIR}
     */
    String getIxiDir();

    /** Help-text snippets for the IXI parameters. */
    interface Descriptions {
        String IXI_DIR = "The folder where ixi modules should be added for automatic discovery by IRI.";
    }
}
452
20.571429
104
java
iri
iri-master/src/main/java/com/iota/iri/conf/IotaConfig.java
package com.iota.iri.conf;

import com.beust.jcommander.JCommander;
import com.beust.jcommander.ParameterException;

import java.io.File;

/**
 * A container for all possible configuration parameters of IRI.
 * In charge of how we parse the configuration from given inputs.
 */
public interface IotaConfig extends APIConfig, NodeConfig, IXIConfig, DbConfig, ConsensusConfig,
        ZMQConfig, TipSelConfig, PearlDiverConfig, SolidificationConfig {

    /** Default configuration file looked up in the working directory. */
    File CONFIG_FILE = new File("iota.ini");

    /**
     * Parses the args to populate the configuration object.
     *
     * @param args command line args
     * @return {@link JCommander} instance that was used for parsing. It contains metadata about the parsing.
     * @throws ParameterException if the parsing failed
     */
    JCommander parseConfigFromArgs(String[] args) throws ParameterException;

    // NOTE(review): presumably reports whether a help flag was parsed — confirm in the implementation.
    boolean isHelp();
}
885
31.814815
111
java
iri
iri-master/src/main/java/com/iota/iri/conf/MainnetConfig.java
package com.iota.iri.conf; public class MainnetConfig extends BaseIotaConfig { public MainnetConfig() { //All the configs are defined in the super class super(); } @Override public boolean isTestnet() { return false; } }
268
16.933333
56
java
iri
iri-master/src/main/java/com/iota/iri/conf/MilestoneConfig.java
package com.iota.iri.conf; import com.iota.iri.crypto.SpongeFactory; import com.iota.iri.model.Hash; /** * Configs that should be used for tracking milestones */ public interface MilestoneConfig extends Config { /** * Default Value: {@link BaseIotaConfig.Defaults#COORDINATOR} * * @return {@value MilestoneConfig.Descriptions#COORDINATOR} */ Hash getCoordinator(); /** * Default Value: {@value TestnetConfig.Defaults#DONT_VALIDATE_TESTNET_MILESTONE_SIG} * * @return {@value MilestoneConfig.Descriptions#DONT_VALIDATE_TESTNET_MILESTONE_SIG} */ boolean isDontValidateTestnetMilestoneSig(); /** * Default Value: {@value BaseIotaConfig.Defaults#NUMBER_OF_KEYS_IN_A_MILESTONE} * @return {@value Descriptions#NUMBER_OF_KEYS_IN_A_MILESTONE} */ int getNumberOfKeysInMilestone(); /** * This is a meta-config. Its value depends on {@link #getNumberOfKeysInMilestone()} * @return the maximal amount of possible milestones that can be issued */ int getMaxMilestoneIndex(); /** * Default Value: {@value BaseIotaConfig.Defaults#COORDINATOR_SECURITY_LEVEL} * @return {@value Descriptions#COORDINATOR_SECURITY_LEVEL} */ int getCoordinatorSecurityLevel(); /** * Default Value: {@link BaseIotaConfig.Defaults#COORDINATOR_SIGNATURE_MODE} * @return {@value Descriptions#COORDINATOR_SIGNATURE_MODE} */ SpongeFactory.Mode getCoordinatorSignatureMode(); interface Descriptions { String COORDINATOR = "The address of the coordinator"; String COORDINATOR_SECURITY_LEVEL = "The security level used in coordinator signatures"; String COORDINATOR_SIGNATURE_MODE = "The signature mode used in coordinator signatures"; String DONT_VALIDATE_TESTNET_MILESTONE_SIG = "Disable coordinator validation on testnet"; String NUMBER_OF_KEYS_IN_A_MILESTONE = "The depth of the Merkle tree which in turn determines the number of" + "leaves (private keys) that the coordinator can use to sign a message."; } }
2,091
34.457627
118
java
iri
iri-master/src/main/java/com/iota/iri/conf/NetworkConfig.java
package com.iota.iri.conf;

import java.util.List;

/**
 * Configurations for the node networking. Including ports, DNS settings, list of neighbors,
 * and various optimization parameters.
 */
public interface NetworkConfig extends Config {

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#NEIGHBORING_SOCKET_ADDRESS}
     *
     * @return {@value NetworkConfig.Descriptions#NEIGHBORING_SOCKET_ADDRESS}
     */
    String getNeighboringSocketAddress();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#NEIGHBORING_SOCKET_PORT}
     *
     * @return {@value NetworkConfig.Descriptions#NEIGHBORING_SOCKET_PORT}
     */
    int getNeighboringSocketPort();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#RECONNECT_ATTEMPT_INTERVAL_SECONDS}
     *
     * @return {@value NetworkConfig.Descriptions#RECONNECT_ATTEMPT_INTERVAL_SECONDS}
     */
    int getReconnectAttemptIntervalSeconds();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#AUTO_TETHERING_ENABLED}
     *
     * @return {@value NetworkConfig.Descriptions#AUTO_TETHERING_ENABLED}
     */
    boolean isAutoTetheringEnabled();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#SEND_LIMIT}
     *
     * @return {@value NetworkConfig.Descriptions#SEND_LIMIT}
     */
    int getSendLimit();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#MAX_NEIGHBORS}
     *
     * @return {@value NetworkConfig.Descriptions#MAX_NEIGHBORS}
     */
    int getMaxNeighbors();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#DNS_REFRESHER_ENABLED}
     *
     * @return {@value NetworkConfig.Descriptions#DNS_REFRESHER_ENABLED}
     */
    boolean isDnsRefresherEnabled();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#DNS_RESOLUTION_ENABLED}
     *
     * @return {@value NetworkConfig.Descriptions#DNS_RESOLUTION_ENABLED}
     */
    boolean isDnsResolutionEnabled();

    /**
     * @return {@value NetworkConfig.Descriptions#NEIGHBORS}
     */
    List<String> getNeighbors();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#QUEUE_SIZE}
     *
     * @return {@value NetworkConfig.Descriptions#Q_SIZE_NODE}
     */
    int getqSizeNode();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#P_DROP_CACHE_ENTRY}
     *
     * @return {@value NetworkConfig.Descriptions#P_DROP_CACHE_ENTRY}
     */
    double getpDropCacheEntry();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#CACHE_SIZE_BYTES}
     *
     * @return {@value NetworkConfig.Descriptions#CACHE_SIZE_BYTES}
     */
    int getCacheSizeBytes();

    /** Help-text snippets for the networking parameters. */
    interface Descriptions {
        String NEIGHBORING_SOCKET_ADDRESS = "The address to bind the TCP server socket to.";
        String NEIGHBORING_SOCKET_PORT = "The TCP Receiver Port.";
        String RECONNECT_ATTEMPT_INTERVAL_SECONDS = "The interval at which to reconnect to wanted neighbors.";
        String AUTO_TETHERING_ENABLED = "Whether to accept new connections from unknown neighbors. " +
                "Unknown meaning neighbors which are not defined in the config and were not added via addNeighbors.";
        String SEND_LIMIT = "The maximum number of packets that may be sent by this node in a 1 second interval. If this number is below 0 then there is no limit.";
        String MAX_NEIGHBORS = "The maximum number of neighbors allowed to be connected.";
        String DNS_REFRESHER_ENABLED = "Reconnect to neighbors that have dynamic IPs.";
        String DNS_RESOLUTION_ENABLED = "Enable using DNS for neighbor peering.";
        String NEIGHBORS = "Urls of neighbor iota nodes.";
        String Q_SIZE_NODE = "The size of the REPLY, BROADCAST, and RECEIVE network queues.";
        String P_DROP_CACHE_ENTRY = DescriptionHelper.PROB_OF + "dropping recently seen transactions out of the network cache. " +
                "It may relieve cases of spam or transactions that weren't stored properly in the database";
        String CACHE_SIZE_BYTES = "The size of the network cache in bytes";
    }
}
4,043
35.763636
164
java
iri
iri-master/src/main/java/com/iota/iri/conf/NodeConfig.java
package com.iota.iri.conf;

/**
 * A configuration that specifies how the node communicates with other nodes.
 *
 * @implNote It currently extends two other interfaces. This has been done due to lack of
 *           separation of concerns in the current code base and will be changed in the future.
 */
public interface NodeConfig extends ProtocolConfig, NetworkConfig {
}
362
32
115
java
iri
iri-master/src/main/java/com/iota/iri/conf/PearlDiverConfig.java
package com.iota.iri.conf;

/**
 * Configurations for PearlDiver proof-of-work hasher.
 */
public interface PearlDiverConfig extends Config {

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#POW_THREADS}
     *
     * @return {@value PearlDiverConfig.Descriptions#POW_THREADS}
     */
    int getPowThreads();

    /**
     * Field descriptions.
     */
    interface Descriptions {
        String POW_THREADS = "Number of threads to use for proof-of-work calculation. " +
                "0 means you default to a number that depends on the number of cores your machine has.";
    }
}
597
25
104
java
iri
iri-master/src/main/java/com/iota/iri/conf/ProtocolConfig.java
package com.iota.iri.conf;

import com.iota.iri.model.Hash;

/**
 * Configuration for protocol rules. Controls what transactions will be accepted by the network,
 * and how they will be propagated to other nodes.
 */
public interface ProtocolConfig extends Config {

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#MWM}
     *
     * @return {@value ProtocolConfig.Descriptions#MWM}
     */
    int getMwm();

    /**
     * @return {@value ProtocolConfig.Descriptions#COORDINATOR}
     */
    Hash getCoordinator();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#REQUEST_HASH_SIZE}
     *
     * @return {@value ProtocolConfig.Descriptions#REQUEST_HASH_SIZE}
     */
    int getRequestHashSize();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#P_SEND_MILESTONE}
     *
     * @return {@value ProtocolConfig.Descriptions#P_SEND_MILESTONE}
     */
    double getpSendMilestone();

    /** Help-text snippets for the protocol parameters. */
    interface Descriptions {
        String MWM = "The minimum weight magnitude is the number of trailing 0s that must appear in the end of a transaction hash. Increasing this number by 1 will result in proof of work that is 3 times as hard.";
        String COORDINATOR = "The address of the coordinator";
        String REQUEST_HASH_SIZE = "The size of the requested hash in a packet. Its size is derived from the minimal MWM value the network accepts. The larger the MWM -> the more trailing zeroes we can ignore -> smaller hash size.";
        String P_SEND_MILESTONE = DescriptionHelper.PROB_OF + "sending a milestone transaction when the node looks for a random transaction to send to a neighbor.";
    }
}
1,647
35.622222
232
java
iri
iri-master/src/main/java/com/iota/iri/conf/SnapshotConfig.java
package com.iota.iri.conf; /** * Configurations for handling global snapshot data */ public interface SnapshotConfig extends Config { /** * Default Value: {@value BaseIotaConfig.Defaults#LOCAL_SNAPSHOTS_ENABLED} * * @return {@value SnapshotConfig.Descriptions#LOCAL_SNAPSHOTS_ENABLED} */ boolean getLocalSnapshotsEnabled(); /** * Default Value: {@value BaseIotaConfig.Defaults#LOCAL_SNAPSHOTS_PRUNING_ENABLED} * * @return {@value SnapshotConfig.Descriptions#LOCAL_SNAPSHOTS_PRUNING_ENABLED} */ boolean getLocalSnapshotsPruningEnabled(); /** * Default Value: {@value BaseIotaConfig.Defaults#LOCAL_SNAPSHOTS_PRUNING_DELAY} * * @return {@value SnapshotConfig.Descriptions#LOCAL_SNAPSHOTS_PRUNING_DELAY} */ int getLocalSnapshotsPruningDelay(); /** * Default Value: {@value BaseIotaConfig.Defaults#LOCAL_SNAPSHOTS_INTERVAL_SYNCED} * * @return {@value SnapshotConfig.Descriptions#LOCAL_SNAPSHOTS_INTERVAL_SYNCED} */ int getLocalSnapshotsIntervalSynced(); /** * Default Value: {@value BaseIotaConfig.Defaults#LOCAL_SNAPSHOTS_INTERVAL_UNSYNCED} * * @return {@value SnapshotConfig.Descriptions#LOCAL_SNAPSHOTS_INTERVAL_UNSYNCED} */ int getLocalSnapshotsIntervalUnsynced(); /** * Default Value: {@value BaseIotaConfig.Defaults#LOCAL_SNAPSHOTS_DEPTH} * * @return {@value SnapshotConfig.Descriptions#LOCAL_SNAPSHOTS_DEPTH} */ int getLocalSnapshotsDepth(); /** * Default Value: {@value BaseIotaConfig.Defaults#LOCAL_SNAPSHOTS_DB_MAX_SIZE} * * @return {@value SnapshotConfig.Descriptions#LOCAL_SNAPSHOTS_DB_MAX_SIZE} */ String getLocalSnapshotsDbMaxSize(); /** * Default Value: {@value BaseIotaConfig.Defaults#SNAPSHOT_TIME} * * @return {@value SnapshotConfig.Descriptions#SNAPSHOT_TIME} */ long getSnapshotTime(); /** * Default Value: {@value BaseIotaConfig.Defaults#SNAPSHOT_FILE} * * return {@value SnapshotConfig.Descriptions#SNAPSHOT_FILE} */ String getSnapshotFile(); /** * Default Value: {@value BaseIotaConfig.Defaults#SNAPSHOT_SIGNATURE_FILE} * * @return {@value 
SnapshotConfig.Descriptions#SNAPSHOT_SIGNATURE_FILE} */ String getSnapshotSignatureFile(); /** * Default Value: {@value BaseIotaConfig.Defaults#MILESTONE_START_INDEX} * * @return {@value SnapshotConfig.Descriptions#MILESTONE_START_INDEX} */ int getMilestoneStartIndex(); /** * Default Value: {@value BaseIotaConfig.Defaults#PREVIOUS_EPOCHS_SPENT_ADDRESSES_FILE} * * @return {@value SnapshotConfig.Descriptions#PREVIOUS_EPOCH_SPENT_ADDRESSES_FILE} */ String getPreviousEpochSpentAddressesFiles(); /** * Default Value: {@value BaseIotaConfig.Defaults#LOCAL_SNAPSHOTS_DB_PATH} * * @return {@value SnapshotConfig.Descriptions#LOCAL_SNAPSHOTS_DB_PATH} */ String getLocalSnapshotsDbPath(); /** * Default Value: {@value BaseIotaConfig.Defaults#LOCAL_SNAPSHOTS_DB_LOG_PATH} * * @return {@value SnapshotConfig.Descriptions#LOCAL_SNAPSHOTS_DB_LOG_PATH} */ String getLocalSnapshotsDbLogPath(); interface Descriptions { String LOCAL_SNAPSHOTS_ENABLED = "Flag that determines if local snapshots are enabled."; String LOCAL_SNAPSHOTS_PRUNING_ENABLED = "Flag that determines if pruning of old data is enabled."; String LOCAL_SNAPSHOTS_PRUNING_DELAY = "Only prune data that precedes the local snapshot by n milestones."; String LOCAL_SNAPSHOTS_INTERVAL_SYNCED = "Take local snapshots every n milestones if the node is fully synced."; String LOCAL_SNAPSHOTS_INTERVAL_UNSYNCED = "Take local snapshots every n milestones if the node is syncing."; String LOCAL_SNAPSHOTS_DEPTH = "Number of milestones to keep."; String LOCAL_SNAPSHOTS_DB_MAX_SIZE = "The maximum size this database should be on disk. Human readable format (GB, GiB, MB, MiB)." + "If set to -1, the database size will not affect the node. 
Without a suffix, we default to GB"; String SNAPSHOT_TIME = "Epoch time of the last snapshot."; String SNAPSHOT_FILE = "Path of the file that contains the state of the ledger at the last snapshot."; String SNAPSHOT_SIGNATURE_FILE = "Path to the file that contains a signature for the snapshot file."; String MILESTONE_START_INDEX = "The start index of the milestones. This index is encoded in each milestone " + "transaction by the coordinator."; String PREVIOUS_EPOCH_SPENT_ADDRESSES_FILE = "The file that contains the list of all used addresses " + "from previous epochs"; String LOCAL_SNAPSHOTS_DB_PATH = "The folder where the local snapshots DB saves its data."; String LOCAL_SNAPSHOTS_DB_LOG_PATH = "The folder where the local snapshots DB saves its logs."; } }
4,992
38.314961
138
java
iri
iri-master/src/main/java/com/iota/iri/conf/SolidificationConfig.java
package com.iota.iri.conf;

import com.iota.iri.model.Hash;

/**
 * Configurations that should be used for the solidification processes.
 */
public interface SolidificationConfig extends Config {

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#PRINT_SYNC_PROGRESS_ENABLED}
     *
     * @return {@value SolidificationConfig.Descriptions#PRINT_SYNC_PROGRESS_ENABLED}
     */
    boolean isPrintSyncProgressEnabled();

    /**
     * @return the coordinator address hash
     */
    Hash getCoordinator();

    /**
     * Field descriptions.
     */
    interface Descriptions {
        String PRINT_SYNC_PROGRESS_ENABLED = "Whether the node should print out progress when synchronizing.";
    }
}
714
22.833333
110
java
iri
iri-master/src/main/java/com/iota/iri/conf/TestnetConfig.java
package com.iota.iri.conf;

import java.util.Objects;

import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.iota.iri.crypto.SpongeFactory;
import com.iota.iri.model.Hash;
import com.iota.iri.model.HashFactory;

/**
 * Configuration used when the node runs on the testnet ({@link #isTestnet()} always returns true).
 * Every value starts from a testnet-specific constant in {@link Defaults} and can be overridden
 * through JCommander command-line flags ({@code @Parameter}) or Jackson-mapped config-file
 * properties ({@code @JsonProperty}).
 */
public class TestnetConfig extends BaseIotaConfig {

    protected Hash coordinator = Defaults.COORDINATOR_ADDRESS;
    protected int numberOfKeysInMilestone = Defaults.KEYS_IN_MILESTONE;
    protected int maxMilestoneIndex = Defaults.MAX_MILESTONE_INDEX;
    protected int coordinatorSecurityLevel = Defaults.COORDINATOR_SECURITY_LEVEL;

    protected boolean dontValidateTestnetMilestoneSig = Defaults.DONT_VALIDATE_TESTNET_MILESTONE_SIG;
    protected String snapshotFile = Defaults.SNAPSHOT_FILE;
    protected String snapshotSignatureFile = Defaults.SNAPSHOT_SIG;
    protected long snapshotTime = Defaults.SNAPSHOT_TIME;
    protected int mwm = Defaults.MWM;
    protected int milestoneStartIndex = Defaults.MILESTONE_START_INDEX;
    protected int transactionPacketSize = Defaults.PACKET_SIZE;
    protected int requestHashSize = Defaults.REQUEST_HASH_SIZE;
    protected SpongeFactory.Mode coordinatorSignatureMode = Defaults.COORDINATOR_SIGNATURE_MODE;

    /**
     * Creates a testnet configuration, redirecting the database folders to the testnet-specific
     * paths so a testnet node never writes into a mainnet database.
     */
    public TestnetConfig() {
        super();
        dbPath = Defaults.DB_PATH;
        dbLogPath = Defaults.DB_LOG_PATH;
    }

    @Override
    public boolean isTestnet() {
        return true;
    }

    @Override
    public Hash getCoordinator() {
        return coordinator;
    }

    @JsonProperty
    @Parameter(names = "--testnet-coordinator", description = MilestoneConfig.Descriptions.COORDINATOR)
    protected void setCoordinator(String coordinator) {
        // The coordinator is supplied as a tryte string and converted into an address hash.
        this.coordinator = HashFactory.ADDRESS.create(coordinator);
    }

    @Override
    public boolean isDontValidateTestnetMilestoneSig() {
        return dontValidateTestnetMilestoneSig;
    }

    @JsonProperty
    @Parameter(names = "--testnet-no-coo-validation",
            description = MilestoneConfig.Descriptions.DONT_VALIDATE_TESTNET_MILESTONE_SIG, arity = 1)
    protected void setDontValidateTestnetMilestoneSig(boolean dontValidateTestnetMilestoneSig) {
        this.dontValidateTestnetMilestoneSig = dontValidateTestnetMilestoneSig;
    }

    @Override
    public int getNumberOfKeysInMilestone() {
        return numberOfKeysInMilestone;
    }

    @JsonProperty("NUMBER_OF_KEYS_IN_A_MILESTONE")
    @Parameter(names = "--milestone-keys", description = MilestoneConfig.Descriptions.NUMBER_OF_KEYS_IN_A_MILESTONE)
    protected void setNumberOfKeysInMilestone(int numberOfKeysInMilestone) {
        this.numberOfKeysInMilestone = numberOfKeysInMilestone;
        // The maximum milestone index follows directly from the key count: 2^numberOfKeysInMilestone.
        this.maxMilestoneIndex = 1 << numberOfKeysInMilestone;
    }

    @Override
    public int getMaxMilestoneIndex() {
        return maxMilestoneIndex;
    }

    @Override
    public int getCoordinatorSecurityLevel() {
        return coordinatorSecurityLevel;
    }

    @JsonProperty("COORDINATOR_SECURITY_LEVEL")
    @Parameter(names = "--testnet-coordinator-security-level",
            description = MilestoneConfig.Descriptions.COORDINATOR_SECURITY_LEVEL)
    protected void setCoordinatorSecurityLevel(int coordinatorSecurityLevel) {
        this.coordinatorSecurityLevel = coordinatorSecurityLevel;
    }

    @Override
    public SpongeFactory.Mode getCoordinatorSignatureMode() {
        return coordinatorSignatureMode;
    }

    @JsonProperty("COORDINATOR_SIGNATURE_MODE")
    @Parameter(names = "--testnet-coordinator-signature-mode",
            description = MilestoneConfig.Descriptions.COORDINATOR_SIGNATURE_MODE)
    protected void setCoordinatorSignatureMode(SpongeFactory.Mode coordinatorSignatureMode) {
        this.coordinatorSignatureMode = coordinatorSignatureMode;
    }

    @Override
    public String getSnapshotFile() {
        return snapshotFile;
    }

    @JsonProperty
    @Parameter(names = "--snapshot", description = SnapshotConfig.Descriptions.SNAPSHOT_FILE)
    protected void setSnapshotFile(String snapshotFile) {
        this.snapshotFile = snapshotFile;
    }

    @Override
    public String getSnapshotSignatureFile() {
        return snapshotSignatureFile;
    }

    @JsonProperty
    @Parameter(names = "--snapshot-sig", description = SnapshotConfig.Descriptions.SNAPSHOT_SIGNATURE_FILE)
    protected void setSnapshotSignatureFile(String snapshotSignatureFile) {
        this.snapshotSignatureFile = snapshotSignatureFile;
    }

    @Override
    public long getSnapshotTime() {
        return snapshotTime;
    }

    @JsonProperty
    @Parameter(names = "--snapshot-timestamp", description = SnapshotConfig.Descriptions.SNAPSHOT_TIME)
    protected void setSnapshotTime(long snapshotTime) {
        this.snapshotTime = snapshotTime;
    }

    @Override
    public int getMwm() {
        return mwm;
    }

    @JsonProperty
    @Parameter(names = "--mwm", description = ProtocolConfig.Descriptions.MWM)
    protected void setMwm(int mwm) {
        this.mwm = mwm;
    }

    @Override
    public int getMilestoneStartIndex() {
        return milestoneStartIndex;
    }

    @JsonProperty
    @Parameter(names = "--milestone-start", description = SnapshotConfig.Descriptions.MILESTONE_START_INDEX)
    protected void setMilestoneStartIndex(int milestoneStartIndex) {
        this.milestoneStartIndex = milestoneStartIndex;
    }

    @Override
    public int getRequestHashSize() {
        return requestHashSize;
    }

    @JsonProperty
    @Parameter(names = {"--request-hash-size"}, description = ProtocolConfig.Descriptions.REQUEST_HASH_SIZE)
    public void setRequestHashSize(int requestHashSize) {
        this.requestHashSize = requestHashSize;
    }

    /**
     * Rejects the mainnet database folder so a testnet node cannot corrupt mainnet data.
     */
    @JsonProperty
    @Override
    public void setDbPath(String dbPath) {
        if (Objects.equals(MainnetConfig.Defaults.DB_PATH, dbPath)) {
            throw new ParameterException("Testnet Db folder cannot be configured to mainnet's db folder");
        }
        super.setDbPath(dbPath);
    }

    /**
     * Rejects the mainnet database log folder so a testnet node cannot corrupt mainnet data.
     */
    @JsonProperty
    @Override
    public void setDbLogPath(String dbLogPath) {
        if (Objects.equals(MainnetConfig.Defaults.DB_LOG_PATH, dbLogPath)) {
            throw new ParameterException("Testnet Db log folder cannot be configured to mainnet's db log folder");
        }
        super.setDbLogPath(dbLogPath);
    }

    /**
     * Testnet default values.
     */
    public interface Defaults {

        Hash COORDINATOR_ADDRESS = HashFactory.ADDRESS.create(
                "EQQFCZBIHRHWPXKMTOLMYUYPCN9XLMJPYZVFJSAY9FQHCCLWTOLLUGKKMXYFDBOOYFBLBI9WUEILGECYM");
        boolean DONT_VALIDATE_TESTNET_MILESTONE_SIG = false;
        int COORDINATOR_SECURITY_LEVEL = 1;
        SpongeFactory.Mode COORDINATOR_SIGNATURE_MODE = SpongeFactory.Mode.CURLP27;
        int KEYS_IN_MILESTONE = 22;
        // 2^KEYS_IN_MILESTONE — kept consistent with setNumberOfKeysInMilestone().
        int MAX_MILESTONE_INDEX = 1 << KEYS_IN_MILESTONE;
        String SNAPSHOT_FILE = "/snapshotTestnet.txt";
        int REQUEST_HASH_SIZE = 49;
        String SNAPSHOT_SIG = "/snapshotTestnet.sig";
        int SNAPSHOT_TIME = 1522306500;
        int MWM = 9;
        int MILESTONE_START_INDEX = 434525;
        int PACKET_SIZE = 1653;
        String DB_PATH = "testnetdb";
        String DB_LOG_PATH = "testnetdb.log";
    }
}
7,205
33.811594
133
java
iri
iri-master/src/main/java/com/iota/iri/conf/TipSelConfig.java
package com.iota.iri.conf;

/**
 * Configuration for how we perform tip selections. Tip selection is invoked when a client wants to find tips to
 * attach its transactions to. The tips are invoked via random walks that start at a certain point in the tangle.
 * The parameters here affect the length and randomness of this walk.
 */
public interface TipSelConfig extends Config {

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#MAX_DEPTH}
     *
     * @return {@value TipSelConfig.Descriptions#MAX_DEPTH}
     */
    int getMaxDepth();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#ALPHA}
     *
     * @return {@value TipSelConfig.Descriptions#ALPHA}
     */
    double getAlpha();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#TIP_SELECTION_TIMEOUT_SEC}
     *
     * @return {@value TipSelConfig.Descriptions#TIP_SELECTION_TIMEOUT_SEC}
     */
    int getTipSelectionTimeoutSec();

    /**
     * Default Value: {@value BaseIotaConfig.Defaults#BELOW_MAX_DEPTH_TRANSACTION_LIMIT}
     *
     * @return {@value TipSelConfig.Descriptions#BELOW_MAX_DEPTH_TRANSACTION_LIMIT}
     */
    int getBelowMaxDepthTransactionLimit();

    /**
     * Human-readable descriptions of the configuration fields above.
     */
    interface Descriptions {

        String MAX_DEPTH = "The maximal number of previous milestones from where you can perform the random walk";
        String ALPHA = "Parameter that defines the randomness of the tip selection. "
                + "Should be a number between 0 to infinity, where 0 is most random and infinity is most deterministic.";
        String TIP_SELECTION_TIMEOUT_SEC = "Defines the maximum number of seconds the tip-selection is allowed to be ongoing. "
                + "If the threshold is exceeded, tip-selection is aborted.";
        String BELOW_MAX_DEPTH_TRANSACTION_LIMIT = "The maximal number of unconfirmed transactions that may be analyzed in "
                + "order to find the latest milestone the transaction that we are stepping on during the walk approves";
    }
}
1,989
39.612245
127
java
iri
iri-master/src/main/java/com/iota/iri/conf/ZMQConfig.java
package com.iota.iri.conf;

/**
 * Configuration for the ZeroMQ message feed (TCP and/or IPC publishing).
 */
public interface ZMQConfig extends Config {

    /**
     * @return {@value Descriptions#ZMQ_ENABLED}
     */
    boolean isZmqEnabled();

    /**
     * @return {@value Descriptions#ZMQ_ENABLE_TCP}
     */
    boolean isZmqEnableTcp();

    /**
     * @return {@value Descriptions#ZMQ_ENABLE_IPC}
     */
    boolean isZmqEnableIpc();

    /**
     * @return {@value Descriptions#ZMQ_PORT}
     */
    int getZmqPort();

    /**
     * @return {@value Descriptions#ZMQ_THREADS}
     */
    int getZmqThreads();

    /**
     * @return {@value Descriptions#ZMQ_IPC}
     */
    String getZmqIpc();

    /**
     * Human-readable descriptions of the configuration fields above.
     */
    interface Descriptions {

        String ZMQ_PORT = "The port used to connect to the ZMQ feed";
        String ZMQ_IPC = "The path that is used to communicate with ZMQ in IPC";
        String ZMQ_ENABLED = "Enable zmq channels (deprecated). Use --zmq-enable-tcp or --zmq-enable-ipc instead";
        String ZMQ_ENABLE_TCP = "Enable zmq channels on tcp port 5556. Use --zmq-port=[PORT] to override.";
        String ZMQ_ENABLE_IPC = "Enable zmq channels on ipc://iri. Use --zmq-ipc=[SOCKET] to override.";
        String ZMQ_THREADS = "The threads used by ZMQ publisher";
    }
}
1,155
25.272727
114
java
iri
iri-master/src/main/java/com/iota/iri/conf/deserializers/CustomBoolDeserializer.java
package com.iota.iri.conf.deserializers; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; import org.apache.commons.lang3.StringUtils; import java.io.IOException; /** * Deserialize boolean type. */ public class CustomBoolDeserializer extends StdDeserializer<Boolean>{ /** * Default constructor */ public CustomBoolDeserializer() { super(Boolean.class); } @Override public Boolean deserialize(JsonParser parser, DeserializationContext ctxt) throws IOException { JsonToken jsonToken = parser.getCurrentToken(); if (jsonToken == JsonToken.VALUE_TRUE) { return true; } if (jsonToken == JsonToken.VALUE_FALSE) { return false; } if (jsonToken == JsonToken.VALUE_NULL) { return parseNull(ctxt); } if (jsonToken == JsonToken.VALUE_STRING) { String text = parser.getText().trim(); if (StringUtils.isEmpty(text)) { return parseNull(ctxt); } return Boolean.valueOf(text); } return false; } private Boolean parseNull(DeserializationContext ctxt) throws IOException { _verifyNullForPrimitive(ctxt); return false; } }
1,427
27.56
99
java
iri
iri-master/src/main/java/com/iota/iri/conf/deserializers/CustomStringDeserializer.java
package com.iota.iri.conf.deserializers; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; import java.io.IOException; /** * Deserialize string and trims all leading and trailing whitespaces from string. */ public class CustomStringDeserializer extends StdDeserializer<String> { /** * Default constructor */ public CustomStringDeserializer() { super(String.class); } @Override public String deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { return jsonParser.getValueAsString().trim(); } }
808
28.962963
145
java
iri
iri-master/src/main/java/com/iota/iri/controllers/AddressViewModel.java
package com.iota.iri.controllers; import com.iota.iri.model.AddressHash; import com.iota.iri.model.Hash; import com.iota.iri.model.persistables.Address; import com.iota.iri.storage.Indexable; import com.iota.iri.storage.Persistable; import com.iota.iri.storage.Tangle; import com.iota.iri.utils.Pair; import java.util.Set; /** * Acts as a controller interface for an {@link Address} set. This controller is used within a * {@link TransactionViewModel} to manipulate an {@link Address} set. */ public class AddressViewModel implements HashesViewModel { private Address self; private Indexable hash; /** * Constructor for an {@link Address} set controller from a {@link Hash} identifier. * @param hash The {@link Hash} identifier that the controller will be created for. */ public AddressViewModel(Hash hash) { this.hash = hash; } /** * Constructor for an {@link Address} set controller from an existing {@link Address} set. If the set is empty, an * {@link Address} set is created. * * @param hashes The {@link Address} set that the controller will be created from * @param hash The {@link Hash} identifier that acts as a reference for the {@link Address} set */ private AddressViewModel(Address hashes, Indexable hash) { self = hashes == null || hashes.set == null ? new Address(): hashes; this.hash = hash; } /** * Creates a new {@link Address} set controller. This controller is created by extracting the {@link Address} set * from the database using the provided {@link Hash} identifier. 
* * @param tangle The tangle reference for the database to find the {@link Address} set in * @param hash The hash identifier for the {@link Address} set that needs to be found * @return The {@link AddressViewModel} controller generated * @throws Exception Thrown if the database cannot load an {@link Address} set from the reference {@link Hash} */ public static AddressViewModel load(Tangle tangle, Indexable hash) throws Exception { return new AddressViewModel((Address) tangle.load(Address.class, hash), hash); } /** * Fetches the first persistable {@link Address} set from the database and generates a new {@link AddressViewModel} * from it. If no {@link Address} sets exist in the database, it will return null. * * @param tangle the tangle reference for the database * @return The new {@link AddressViewModel} * @throws Exception Thrown if the database fails to return a first object */ public static AddressViewModel first(Tangle tangle) throws Exception { Pair<Indexable, Persistable> bundlePair = tangle.getFirst(Address.class, AddressHash.class); if(bundlePair != null && bundlePair.hi != null) { return new AddressViewModel((Address) bundlePair.hi, (Hash) bundlePair.low); } return null; } @Override public boolean store(Tangle tangle) throws Exception { return tangle.save(self, hash); } @Override public int size() { return self.set.size(); } @Override public boolean addHash(Hash theHash) { return getHashes().add(theHash); } @Override public Indexable getIndex() { return hash; } @Override public Set<Hash> getHashes() { return self.set; } @Override public void delete(Tangle tangle) throws Exception { tangle.delete(Address.class,hash); } @Override public AddressViewModel next(Tangle tangle) throws Exception { Pair<Indexable, Persistable> bundlePair = tangle.next(Address.class, hash); if(bundlePair != null && bundlePair.hi != null) { return new AddressViewModel((Address) bundlePair.hi, (Hash) bundlePair.low); } return null; } }
3,886
34.66055
119
java
iri
iri-master/src/main/java/com/iota/iri/controllers/ApproveeViewModel.java
package com.iota.iri.controllers; import com.iota.iri.model.Hash; import com.iota.iri.model.TransactionHash; import com.iota.iri.model.persistables.Approvee; import com.iota.iri.storage.Indexable; import com.iota.iri.storage.Persistable; import com.iota.iri.storage.Tangle; import com.iota.iri.utils.Pair; import java.util.Set; /** * Acts as a controller interface for an {@link Approvee} set. This controller is used within a * {@link TransactionViewModel} to manipulate an {@link Approvee} set. */ public class ApproveeViewModel implements HashesViewModel { private Approvee self; private Indexable hash; /** * Constructor for an {@link Approvee} set controller from a {@link Hash} identifier. * @param hash The {@link Hash} identifier that the controller will be created for. */ public ApproveeViewModel(Hash hash) { this.hash = hash; } /** * Constructor for an {@link Approvee} set controller from an existing {@link Approvee} set. If the set is empty, a * new {@link Approvee} set is created. * * @param hashes The {@link Approvee} set that the controller will be created from * @param hash The {@link Hash} identifier that acts as a reference for the {@link Approvee} set */ private ApproveeViewModel(Approvee hashes, Indexable hash) { self = hashes == null || hashes.set == null ? new Approvee(): hashes; this.hash = hash; } /** * Creates a new {@link Approvee} set controller. This controller is created by extracting the {@link Approvee} set * from the database using the provided {@link Hash} identifier. 
* * @param tangle The tangle reference for the database to find the {@link Approvee} set in * @param hash The hash identifier for the {@link Approvee} set that needs to be found * @return The {@link ApproveeViewModel} controller generated * @throws Exception Thrown if the database cannot load an {@link Approvee} set from the reference {@link Hash} */ public static ApproveeViewModel load(Tangle tangle, Indexable hash) throws Exception { return new ApproveeViewModel((Approvee) tangle.load(Approvee.class, hash), hash); } /** * Fetches the first persistable {@link Approvee} set from the database and generates a new * {@link ApproveeViewModel} from it. If no {@link Approvee} sets exist in the database, it will return null. * * @param tangle the tangle reference for the database * @return The new {@link ApproveeViewModel} * @throws Exception Thrown if the database fails to return a first object */ public static ApproveeViewModel first(Tangle tangle) throws Exception { Pair<Indexable, Persistable> bundlePair = tangle.getFirst(Approvee.class, TransactionHash.class); if(bundlePair != null && bundlePair.hi != null) { return new ApproveeViewModel((Approvee) bundlePair.hi, (Hash) bundlePair.low); } return null; } @Override public boolean store(Tangle tangle) throws Exception { return tangle.save(self, hash); } @Override public int size() { return self.set.size(); } @Override public boolean addHash(Hash theHash) { return getHashes().add(theHash); } @Override public Indexable getIndex() { return hash; } @Override public Set<Hash> getHashes() { return self.set; } @Override public void delete(Tangle tangle) throws Exception { tangle.delete(Approvee.class,hash); } @Override public ApproveeViewModel next(Tangle tangle) throws Exception { Pair<Indexable, Persistable> bundlePair = tangle.next(Approvee.class, hash); if(bundlePair != null && bundlePair.hi != null) { return new ApproveeViewModel((Approvee) bundlePair.hi, (Hash) bundlePair.low); } return null; } }
3,935
35.110092
119
java
iri
iri-master/src/main/java/com/iota/iri/controllers/BundleViewModel.java
package com.iota.iri.controllers; import com.iota.iri.model.BundleHash; import com.iota.iri.model.Hash; import com.iota.iri.model.persistables.Bundle; import com.iota.iri.storage.Indexable; import com.iota.iri.storage.Persistable; import com.iota.iri.storage.Tangle; import com.iota.iri.utils.Pair; import java.util.Set; /** * Acts as a controller interface for a {@link Bundle} set. This controller is used within a * {@link TransactionViewModel} to manipulate a {@link Bundle} set. */ public class BundleViewModel implements HashesViewModel { private Bundle self; private Indexable hash; /** * Constructor for a {@link Bundle} set controller from a {@link Hash} identifier. * @param hash The {@link Hash} identifier that the controller will be created for. */ public BundleViewModel(Hash hash) { this.hash = hash; } /** * Constructor for a {@link Bundle} set controller from an existing {@link Bundle} set. If the set is empty, a new * {@link Bundle} set is created. * * @param hashes The {@link Bundle} set that the controller will be created from * @param hash The {@link Hash} identifier that acts as a reference for the {@link Bundle} set */ private BundleViewModel(Bundle hashes, Indexable hash) { self = hashes == null || hashes.set == null ? new Bundle(): hashes; this.hash = hash; } /** * Creates a new {@link Bundle} set controller. This controller is created by extracting the {@link Bundle} set * from the database using the provided {@link Hash} identifier. 
* * @param tangle The tangle reference for the database to find the {@link Bundle} set in * @param hash The hash identifier for the {@link Bundle} set that needs to be found * @return The {@link BundleViewModel} controller generated * @throws Exception Thrown if the database cannot load an {@link Bundle} set from the reference {@link Hash} */ public static BundleViewModel load(Tangle tangle, Indexable hash) throws Exception { return new BundleViewModel((Bundle) tangle.load(Bundle.class, hash), hash); } /** * Fetches the first persistable {@link Bundle} set from the database and generates a new * {@link BundleViewModel} from it. If no {@link Bundle} sets exist in the database, it will return null. * * @param tangle the tangle reference for the database * @return The new {@link BundleViewModel} * @throws Exception Thrown if the database fails to return a first object */ public static BundleViewModel first(Tangle tangle) throws Exception { Pair<Indexable, Persistable> bundlePair = tangle.getFirst(Bundle.class, BundleHash.class); if(bundlePair != null && bundlePair.hi != null) { return new BundleViewModel((Bundle) bundlePair.hi, (Hash) bundlePair.low); } return null; } /** * {@inheritDoc} */ @Override public boolean store(Tangle tangle) throws Exception { return tangle.save(self, hash); } @Override public int size() { return self.set.size(); } @Override public boolean addHash(Hash theHash) { return getHashes().add(theHash); } @Override public Indexable getIndex() { return hash; } @Override public Set<Hash> getHashes() { return self.set; } @Override public void delete(Tangle tangle) throws Exception { tangle.delete(Bundle.class,hash); } @Override public BundleViewModel next(Tangle tangle) throws Exception { Pair<Indexable, Persistable> bundlePair = tangle.next(Bundle.class, hash); if(bundlePair != null && bundlePair.hi != null) { return new BundleViewModel((Bundle) bundlePair.hi, (Hash) bundlePair.low); } return null; } }
3,882
33.669643
118
java
iri
iri-master/src/main/java/com/iota/iri/controllers/HashesViewModel.java
package com.iota.iri.controllers;

import java.util.Set;

import com.iota.iri.model.Hash;
import com.iota.iri.storage.Indexable;
import com.iota.iri.storage.Tangle;

/**
 * Base implementation of a controller interface for sets of {@link com.iota.iri.model.persistables.Hashes}.
 */
public interface HashesViewModel {

    /**
     * Store the {@link com.iota.iri.model.persistables.Hashes} set and {@link Hash} reference to the database
     *
     * @param tangle The tangle reference for the database
     * @return True if the object was saved correctly, False if not
     * @throws Exception Thrown if the {@link com.iota.iri.model.persistables.Hashes} set or index {@link Hash} are null
     */
    boolean store(Tangle tangle) throws Exception;

    /**
     * @return The size of the {@link com.iota.iri.model.persistables.Hashes} set referenced by the controller
     */
    int size();

    /**
     * Add a hash object to the controllers referenced {@link com.iota.iri.model.persistables.Hashes} set
     *
     * @param theHash The {@link Hash} identifier to be added to the set
     * @return True if the {@link com.iota.iri.model.persistables.Hashes} set is added correctly, False if not
     */
    boolean addHash(Hash theHash);

    /**
     * @return The {@link Hash} identifier of the {@link com.iota.iri.model.persistables.Hashes} set
     */
    Indexable getIndex();

    /**
     * @return The {@link com.iota.iri.model.persistables.Hashes} set referenced by the controller
     */
    Set<Hash> getHashes();

    /**
     * Deletes a referenced {@link com.iota.iri.model.persistables.Hashes} set from the database
     *
     * @param tangle The tangle reference for the database
     * @throws Exception If the {@link com.iota.iri.model.persistables.Hashes} set does not exist or fails to be removed
     */
    void delete(Tangle tangle) throws Exception;

    /**
     * Fetches the next indexed persistable {@link com.iota.iri.model.persistables.Hashes} set from the database and
     * generates a new {@link HashesViewModel} from it. If no {@link com.iota.iri.model.persistables.Hashes} sets exist
     * in the database, it will return null.
     *
     * @param tangle The tangle reference for the database
     * @return The new {@link HashesViewModel}
     * @throws Exception If the database fails to return a next {@link com.iota.iri.model.persistables.Hashes} set
     */
    HashesViewModel next(Tangle tangle) throws Exception;
}
2,469
37
120
java
iri
iri-master/src/main/java/com/iota/iri/controllers/LocalSnapshotViewModel.java
package com.iota.iri.controllers;

import com.iota.iri.model.Hash;
import com.iota.iri.model.IntegerIndex;
import com.iota.iri.model.LocalSnapshot;
import com.iota.iri.storage.PersistenceProvider;

import java.util.Map;

/**
 * Controller for the single {@link LocalSnapshot} record that is persisted under a fixed key.
 */
public class LocalSnapshotViewModel {

    // Fixed key: there is only ever one local-snapshot record in the database.
    private static final IntegerIndex LS_KEY = new IntegerIndex(1);

    private LocalSnapshot localSnapshot;

    /**
     * Loads the {@link LocalSnapshot} stored under {@link LocalSnapshotViewModel#LS_KEY} and wraps
     * it in a controller.
     *
     * @param provider the persistence provider to load the {@link LocalSnapshot} from
     * @return the resulting {@link LocalSnapshotViewModel}
     * @throws Exception if loading the {@link LocalSnapshot} from the database fails
     */
    public static LocalSnapshotViewModel load(PersistenceProvider provider) throws Exception {
        LocalSnapshot stored = (LocalSnapshot) provider.get(LocalSnapshot.class, LS_KEY);
        return new LocalSnapshotViewModel(stored);
    }

    /**
     * Builds a controller around freshly assembled local-snapshot data.
     *
     * @param milestoneHash      hash of the milestone the local snapshot was taken at
     * @param milestoneIndex     index of that milestone
     * @param milestoneTimestamp timestamp of that milestone
     * @param solidEntryPoints   solid entry points of the snapshot
     * @param seenMilestones     milestones seen at snapshot time
     * @param ledgerState        ledger state at the given milestone
     */
    public LocalSnapshotViewModel(Hash milestoneHash, int milestoneIndex, long milestoneTimestamp,
            Map<Hash, Integer> solidEntryPoints, Map<Hash, Integer> seenMilestones,
            Map<Hash, Long> ledgerState) {
        LocalSnapshot snapshot = new LocalSnapshot();
        snapshot.milestoneHash = milestoneHash;
        snapshot.milestoneIndex = milestoneIndex;
        snapshot.milestoneTimestamp = milestoneTimestamp;
        snapshot.solidEntryPoints = solidEntryPoints;
        snapshot.numSolidEntryPoints = solidEntryPoints.size();
        snapshot.seenMilestones = seenMilestones;
        snapshot.numSeenMilestones = seenMilestones.size();
        snapshot.ledgerState = ledgerState;
        this.localSnapshot = snapshot;
    }

    /**
     * Wraps an already-loaded {@link LocalSnapshot}; a fresh empty {@link LocalSnapshot} is
     * substituted when the loaded record is missing or has no ledger state.
     *
     * @param localSnapshot the loaded {@link LocalSnapshot} (may be null)
     */
    private LocalSnapshotViewModel(LocalSnapshot localSnapshot) {
        if (localSnapshot == null || localSnapshot.ledgerState == null) {
            this.localSnapshot = new LocalSnapshot();
        } else {
            this.localSnapshot = localSnapshot;
        }
    }

    /**
     * @return true when no usable ledger state is present, false otherwise
     */
    public boolean isEmpty() {
        return localSnapshot == null
                || localSnapshot.ledgerState == null
                || localSnapshot.ledgerState.isEmpty();
    }

    /**
     * Persists the {@link LocalSnapshot} under {@link LocalSnapshotViewModel#LS_KEY}.
     *
     * @param provider the persistence provider to save the {@link LocalSnapshot} with
     * @return true if the {@link LocalSnapshot} was saved correctly, false if not
     * @throws Exception if saving the {@link LocalSnapshot} fails
     */
    public boolean store(PersistenceProvider provider) throws Exception {
        return provider.save(localSnapshot, LS_KEY);
    }

    /**
     * Removes the {@link LocalSnapshot} stored under {@link LocalSnapshotViewModel#LS_KEY}.
     *
     * @param provider the persistence provider to delete the {@link LocalSnapshot} with
     * @throws Exception if removing the {@link LocalSnapshot} fails
     */
    public void delete(PersistenceProvider provider) throws Exception {
        provider.delete(LocalSnapshot.class, LS_KEY);
    }
}
4,232
45.01087
130
java
iri
iri-master/src/main/java/com/iota/iri/controllers/MilestoneViewModel.java
package com.iota.iri.controllers; import com.iota.iri.model.Hash; import com.iota.iri.model.IntegerIndex; import com.iota.iri.model.persistables.Milestone; import com.iota.iri.storage.Indexable; import com.iota.iri.storage.Persistable; import com.iota.iri.storage.Tangle; import com.iota.iri.utils.Pair; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; /** * Acts as a controller interface for a {@link Milestone} hash object. This controller is used by the * {@link com.iota.iri.MilestoneTracker} to manipulate a {@link Milestone} object. */ public class MilestoneViewModel { private final Milestone milestone; private static final Map<Integer, MilestoneViewModel> milestones = new ConcurrentHashMap<>(); private MilestoneViewModel(final Milestone milestone) { this.milestone = milestone; } /** * Removes the contents of the stored {@link Milestone} object set. */ public static void clear() { milestones.clear(); } /** * This method removes a {@link MilestoneViewModel} from the cache. * * It is used by the {@link com.iota.iri.service.transactionpruning.TransactionPruner} to remove milestones that * were deleted in the database, so that the runtime environment correctly reflects the database state. * * @param milestoneIndex the index of the milestone */ public static void clear(int milestoneIndex) { milestones.remove(milestoneIndex); } /** * Constructor for a {@link Milestone} set controller. This controller is generated from a finalized * {@link Milestone} hash identifier, indexing this object to the integer {@link Milestone} index. 
* * @param index The finalized numerical index the {@link Milestone} object will be referenced by in the set * @param milestoneHash The finalized {@link Hash} identifier for the {@link Milestone} object */ public MilestoneViewModel(final int index, final Hash milestoneHash) { this.milestone = new Milestone(); this.milestone.index = new IntegerIndex(index); milestone.hash = milestoneHash; } /** * Fetches an existing {@link MilestoneViewModel} if its index reference can be found in the controller. If the * {@link MilestoneViewModel} is null, but the indexed {@link Milestone} object exists in the database, a new * controller is created for the {@link Milestone} object. * * @param tangle The tangle reference for the database * @param index The integer index of the {@link Milestone} object that the controller should be returned for * @return The {@link MilestoneViewModel} for the indexed {@link Milestone} object * @throws Exception Thrown if the database fails to load the indexed {@link Milestone} object */ public static MilestoneViewModel get(Tangle tangle, int index) throws Exception { MilestoneViewModel milestoneViewModel = milestones.get(index); if(milestoneViewModel == null && load(tangle, index)) { milestoneViewModel = milestones.get(index); } return milestoneViewModel; } /** * Fetches a {@link Milestone} object from the database using its integer index. If the {@link Milestone} and the * associated {@link Hash} identifier are not null, a new {@link MilestoneViewModel} is created for the * {@link Milestone} object, and it is placed into the <tt>Milestones</tt> set, indexed by the provided integer * index. 
* * @param tangle The tangle reference for the database * @param index The integer index reference for the {@link Milestone} object * @return True if the {@link Milestone} object is stored in the <tt>Milestones</tt> set, False if not * @throws Exception Thrown if the database fails to load the {@link Milestone} object */ public static boolean load(Tangle tangle, int index) throws Exception { Milestone milestone = (Milestone) tangle.load(Milestone.class, new IntegerIndex(index)); if(milestone != null && milestone.hash != null) { milestones.put(index, new MilestoneViewModel(milestone)); return true; } return false; } /** * Fetches the first persistable {@link Milestone} object from the database and generates a new * {@link MilestoneViewModel} from it. If no {@link Milestone} objects exist in the database, it will return null. * * @param tangle the tangle reference for the database * @return The new {@link MilestoneViewModel} * @throws Exception Thrown if the database fails to return a first object */ public static MilestoneViewModel first(Tangle tangle) throws Exception { Pair<Indexable, Persistable> milestonePair = tangle.getFirst(Milestone.class, IntegerIndex.class); if(milestonePair != null && milestonePair.hi != null) { Milestone milestone = (Milestone) milestonePair.hi; return new MilestoneViewModel(milestone); } return null; } /** * Fetches the most recent persistable {@link Milestone} object from the database and generates a new * {@link MilestoneViewModel} from it. If no {@link Milestone} objects exist in the database, it will return null. 
* * @param tangle the tangle reference for the database * @return The new {@link MilestoneViewModel} * @throws Exception Thrown if the database fails to return a first object */ public static MilestoneViewModel latest(Tangle tangle) throws Exception { Pair<Indexable, Persistable> milestonePair = tangle.getLatest(Milestone.class, IntegerIndex.class); if(milestonePair != null && milestonePair.hi != null) { Milestone milestone = (Milestone) milestonePair.hi; return new MilestoneViewModel(milestone); } return null; } /** * Fetches the previously indexed persistable {@link Milestone} object from the database and generates a new * {@link MilestoneViewModel} from it. If no {@link Milestone} objects exist in the database, it will return null. * * @param tangle the tangle reference for the database * @return The new {@link MilestoneViewModel} * @throws Exception Thrown if the database fails to return a first object */ public MilestoneViewModel previous(Tangle tangle) throws Exception { Pair<Indexable, Persistable> milestonePair = tangle.previous(Milestone.class, this.milestone.index); if(milestonePair != null && milestonePair.hi != null) { Milestone milestone = (Milestone) milestonePair.hi; return new MilestoneViewModel((Milestone) milestone); } return null; } /** * Fetches the next indexed persistable {@link Milestone} object from the database and generates a new * {@link MilestoneViewModel} from it. If no {@link Milestone} objects exist in the database, it will return null. 
* * @param tangle The tangle reference for the database * @return The new {@link MilestoneViewModel} * @throws Exception Thrown if the database fails to return a first object */ public MilestoneViewModel next(Tangle tangle) throws Exception { Pair<Indexable, Persistable> milestonePair = tangle.next(Milestone.class, this.milestone.index); if(milestonePair != null && milestonePair.hi != null) { Milestone milestone = (Milestone) milestonePair.hi; return new MilestoneViewModel((Milestone) milestone); } return null; } /** * Fetches a {@link MilestoneViewModel} for the closest {@link Milestone} object previously indexed in the * database. The method starts at the provided index and works backwards through the database to try and find a * {@link MilestoneViewModel} for the previous indexes until a non null controller is found. * * @param tangle The tangle reference for the database * @param index The beginning index the method will work backwards from * @param minIndex The minimum index that should be found in the database * @return The {@link MilestoneViewModel} of the closest found controller previously indexed in the database * @throws Exception Thrown if there is a failure to fetch a previous {@link MilestoneViewModel} */ public static MilestoneViewModel findClosestPrevMilestone(Tangle tangle, int index, int minIndex) throws Exception { // search for the previous milestone preceding our index MilestoneViewModel previousMilestoneViewModel = null; int currentIndex = index; while(previousMilestoneViewModel == null && --currentIndex >= minIndex) { previousMilestoneViewModel = MilestoneViewModel.get(tangle, currentIndex); } return previousMilestoneViewModel; } /** * This method looks for the next milestone after a given index. * * In contrast to the {@link #next} method we do not rely on the insertion order in the database but actively search * for the milestone that was issued next by the coordinator (coo-order preserved). 
* * @param tangle Tangle object which acts as a database interface * @param index milestone index where the search shall start * @param maxIndex milestone index where the search shall stop * @return the milestone which follows directly after the given index or null if none was found * @throws Exception if anything goes wrong while loading entries from the database */ public static MilestoneViewModel findClosestNextMilestone(Tangle tangle, int index, int maxIndex) throws Exception { // search for the next milestone following our index MilestoneViewModel nextMilestoneViewModel = null; int currentIndex = index; while(nextMilestoneViewModel == null && ++currentIndex <= maxIndex) { nextMilestoneViewModel = MilestoneViewModel.get(tangle, currentIndex); } return nextMilestoneViewModel; } /** * Save the {@link Milestone} object, indexed by its integer index, to the database. * * @param tangle The tangle reference for the database * @return True if the {@link Milestone} object is saved correctly, False if not * @throws Exception Thrown if there is an error while saving the {@link Milestone} object */ public boolean store(Tangle tangle) throws Exception { return tangle.save(milestone, milestone.index); } /**@return The {@link Hash} identifier of the {@link Milestone} object*/ public Hash getHash() { return milestone.hash; } /**@return The integer index of the {@link Milestone} object*/ public Integer index() { return milestone.index.getValue(); } /** * Removes the {@link Milestone} object from the database. * * @param tangle The tangle reference for the database * @throws Exception Thrown if there is an error removing the {@link Milestone} object */ public void delete(Tangle tangle) throws Exception { tangle.delete(Milestone.class, milestone.index); } /** * This method creates a human readable string representation of the milestone. * * It can be used to directly append the milestone in error and debug messages. 
* * @return human readable string representation of the milestone */ @Override public String toString() { return "milestone #" + index() + " (" + getHash().toString() + ")"; } }
11,600
44.853755
120
java
iri
iri-master/src/main/java/com/iota/iri/controllers/StateDiffViewModel.java
package com.iota.iri.controllers; import com.iota.iri.model.Hash; import com.iota.iri.model.StateDiff; import com.iota.iri.storage.Tangle; import java.util.Map; /** * Acts as a controller interface for a {@link StateDiff}. This controller is used to manipulate a {@link StateDiff} * mapping of {@link com.iota.iri.model.persistables.Hashes} to <tt>Balance</tt> states in the database. */ public class StateDiffViewModel { private StateDiff stateDiff; private Hash hash; /** * Creates a {@link StateDiff} controller using a {@link Hash} identifier as a reference point. A {@link StateDiff} * is loaded from the database using the {@link Hash} reference, and the {@link Hash} identifier is set as the * controller reference as well. * * @param tangle The tangle reference for the database * @param hash The {@link Hash} identifier of the {@link StateDiff} the controller will be created for * @return The new {@link StateDiffViewModel} * @throws Exception Thrown if there is an error loading the {@link StateDiff} from the database */ public static StateDiffViewModel load(Tangle tangle, Hash hash) throws Exception { return new StateDiffViewModel((StateDiff) tangle.load(StateDiff.class, hash), hash); } /** * Constructor for a {@link StateDiff} controller using a predefined {@link StateDiff} mapping. The {@link Hash} * identifier is assigned as a reference for the controller, and the state is stored in the controller. * * @param state The {@link StateDiff} mapping that the controller will be made for * @param hash The reference {@link Hash} identifier */ public StateDiffViewModel(final Map<Hash, Long> state, final Hash hash) { this.hash = hash; this.stateDiff = new StateDiff(); this.stateDiff.state = state; } /** * This method checks the {@link com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider} to determine if an object * might exist in the database. 
If it definitively does not exist, it will return False * * @param tangle The tangle reference for the database * @param hash The {@link Hash} identifier of the object you are looking for * @return True if the key might exist in the database, False if it definitively does not * @throws Exception Thrown if there is an error checking the database */ public static boolean maybeExists(Tangle tangle, Hash hash) throws Exception { return tangle.maybeHas(StateDiff.class, hash); } /** * Creates a finalized {@link StateDiff} controller. The referenced {@link StateDiff} of this controller and its * reference {@link Hash} identifier cannot be modified. If the provided {@link StateDiff} is null, an empty * {@link StateDiff} will be created. * * @param diff The finalized {@link StateDiff} the controller will be made for * @param hash The finalized {@link Hash} identifier of the controller */ private StateDiffViewModel(final StateDiff diff, final Hash hash) { this.hash = hash; this.stateDiff = diff == null || diff.state == null ? new StateDiff(): diff; } /**@return True if the {@link StateDiff} is empty, False if there is a variable present*/ public boolean isEmpty() { return stateDiff == null || stateDiff.state == null || stateDiff.state.size() == 0; } /**@return The {@link Hash} identifier of the {@link StateDiff} controller */ public Hash getHash() { return hash; } /**@return The {@link StateDiff} map of the controller*/ public Map<Hash, Long> getDiff() { return stateDiff.state; } /** * Saves the {@link StateDiff} and referencing {@link Hash} identifier to the database. 
* * @param tangle The tangle reference for the database * @return True if the {@link StateDiff} was saved correctly, False if not * @throws Exception Thrown if there is an error while saving the {@link StateDiff} */ public boolean store(Tangle tangle) throws Exception { //return Tangle.instance().save(stateDiff, hash).get(); return tangle.save(stateDiff, hash); } /** * Deletes the {@link StateDiff} and referencing {@link Hash} identifier from the database. * * @param tangle The tangle reference for the database * @throws Exception Thrown if there is an error while removing the {@link StateDiff} */ public void delete(Tangle tangle) throws Exception { tangle.delete(StateDiff.class, hash); } }
4,595
41.953271
119
java
iri
iri-master/src/main/java/com/iota/iri/controllers/TagViewModel.java
package com.iota.iri.controllers; import com.iota.iri.model.Hash; import com.iota.iri.model.TagHash; import com.iota.iri.model.persistables.ObsoleteTag; import com.iota.iri.model.persistables.Tag; import com.iota.iri.storage.Indexable; import com.iota.iri.storage.Persistable; import com.iota.iri.storage.Tangle; import com.iota.iri.utils.Pair; import java.util.Set; /** * Acts as a controller interface for a {@link Tag} set. These controllers are used within a * {@link TransactionViewModel} to manipulate a {@link Tag} set. */ public class TagViewModel implements HashesViewModel { private Tag self; private Indexable hash; /** * Creates an empty <tt>Tag</tt> set controller. This controller is created using a given hash identifier. * * @param hash The hash identifier that the {@link TagViewModel} will be referenced by */ public TagViewModel(Hash hash) { this.hash = hash; } /** * Constructor for a {@link Tag} set controller from an existing {@link Tag} set. If the set is empty, a new * {@link Tag} set is created. * * @param hashes The {@link Tag} set that the controller will be created from * @param hash The {@link Hash} identifier that acts as a reference for the {@link Tag} set */ private TagViewModel(Tag hashes, Indexable hash) { self = hashes == null || hashes.set == null ? new Tag(): hashes; this.hash = hash; } /** * Creates a new {@link Tag} set controller by converting a {@link com.iota.iri.model.persistables.Hashes} * referenced by the provided {@link Hash} identifer. This controller is generated by extracting the * {@link com.iota.iri.model.persistables.Hashes} set from the database using the {@link Hash} identifier * and casting this set to a {@link Tag} set. This set is then paired with the {@link Hash} identifier to create * and return a new {@link TagViewModel}. 
* * * @param tangle The tangle reference for the database to find the {@link Tag} set in * @param hash The hash identifier for the {@link Tag} set that needs to be found * @param model The provided {@link Hash} set to be converted * @return The {@link TagViewModel} controller generated * @throws Exception Thrown if the database cannot load an {@link Tag} set from the reference {@link Hash} */ private static TagViewModel load(Tangle tangle, Indexable hash, Class<? extends Tag> model) throws Exception { return new TagViewModel((Tag) tangle.load(model, hash), hash); } /** * Creates a new {@link Tag} set controller. This controller is created by extracting the {@link Tag} set * from the database using the provided {@link Hash} identifier. * * @param tangle The tangle reference for the database to find the {@link Tag} set in * @param hash The hash identifier for the {@link Tag} set that needs to be found * @return The {@link TagViewModel} controller generated * @throws Exception Thrown if the database cannot load an {@link Tag} set from the reference {@link Hash} */ public static TagViewModel load(Tangle tangle, Indexable hash) throws Exception { return load(tangle, hash, Tag.class); } /** * Creates a new {@link ObsoleteTag} set controller. This controller is created by loading the {@link ObsoleteTag} * set referenced by the {@link Hash} identifier from the database, and loading a new {@link TagViewModel} for it. 
* * @param tangle The tangle reference for the database to find the {@link Tag} set in * @param hash The hash identifier for the {@link Tag} set that needs to be found * @return The {@link TagViewModel} controller generated * @throws Exception Thrown if the database cannot load an {@link Tag} set from the reference {@link Hash} */ public static TagViewModel loadObsolete(Tangle tangle, Indexable hash) throws Exception { return load(tangle, hash, ObsoleteTag.class); } /** * Fetches the first persistable {@link Tag} set from the database and generates a new * {@link TagViewModel} from it. If no {@link Tag} sets exist in the database, it will return null. * * @param tangle the tangle reference for the database * @return The new {@link TagViewModel} * @throws Exception Thrown if the database fails to return a first object */ public static TagViewModel first(Tangle tangle) throws Exception { Pair<Indexable, Persistable> tagPair = tangle.getFirst(Tag.class, TagHash.class); if(tagPair != null && tagPair.hi != null) { return new TagViewModel((Tag) tagPair.hi, (Hash) tagPair.low); } return null; } @Override public boolean store(Tangle tangle) throws Exception { return tangle.save(self, hash); } @Override public int size() { return self.set.size(); } @Override public boolean addHash(Hash theHash) { return getHashes().add(theHash); } @Override public Indexable getIndex() { return hash; } @Override public Set<Hash> getHashes() { return self.set; } @Override public void delete(Tangle tangle) throws Exception { tangle.delete(Tag.class,hash); } @Override public TagViewModel next(Tangle tangle) throws Exception { Pair<Indexable, Persistable> tagPair = tangle.next(Tag.class, hash); if(tagPair != null && tagPair.hi != null) { return new TagViewModel((Tag) tagPair.hi, (Hash) tagPair.low); } return null; } }
5,648
38.78169
118
java
iri
iri-master/src/main/java/com/iota/iri/controllers/TipsViewModel.java
package com.iota.iri.controllers; import java.security.SecureRandom; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.Set; import com.iota.iri.model.Hash; /** * Acts as a controller interface for a <tt>Tips</tt> set. A tips set is a a First In First Out cache for * {@link com.iota.iri.model.persistables.Transaction} objects that have no children. <tt>Tips</tt> are stored in the * {@link TipsViewModel} until they are deemed solid or are removed from the cache. */ public class TipsViewModel { /** The maximum size of the <tt>Tips</tt> set*/ public static final int MAX_TIPS = 5000; private final FifoHashCache<Hash> tips = new FifoHashCache<>(TipsViewModel.MAX_TIPS); private final FifoHashCache<Hash> solidTips = new FifoHashCache<>(TipsViewModel.MAX_TIPS); private final SecureRandom seed = new SecureRandom(); private final Object sync = new Object(); /** * Adds a {@link Hash} object to the tip cache in a synchronous fashion. * * @param hash The {@link Hash} identifier of the object to be added */ public void addTipHash(Hash hash) { synchronized (sync) { tips.add(hash); } } /** * Removes a {@link Hash} object from the tip cache in a synchronous fashion. * * @param hash The {@link Hash} identifier of the object to be removed */ public void removeTipHash(Hash hash) { synchronized (sync) { if (!tips.remove(hash)) { solidTips.remove(hash); } } } /** * Removes the referenced {@link Hash} object from the <tt>Tips</tt> cache and adds it to the <tt>SolidTips</tt> * cache. * * <p> * A solid tip is a transaction that has been stored in the database, and there are no missing transactions in its history. * </p> * * @param tip The {@link Hash} identifier for the object that will be set to solid */ public void setSolid(Hash tip) { synchronized (sync) { if (tips.remove(tip)) { solidTips.add(tip); } } } /** * Iterates through all solid and non-solid tips and compiles them into one {@link Hash} set to be returned. 
This * does so in a synchronised fashion. * * @return The {@link Hash} set containing all solid and non-solid tips */ public Set<Hash> getTips() { Set<Hash> hashes = new HashSet<>(); synchronized (sync) { Iterator<Hash> hashIterator; hashIterator = tips.iterator(); while (hashIterator.hasNext()) { hashes.add(hashIterator.next()); } hashIterator = solidTips.iterator(); while (hashIterator.hasNext()) { hashes.add(hashIterator.next()); } } return hashes; } /** * Returns a random tip by generating a random integer within the range of the <tt>SolidTips</tt> set, and iterates * through the set until a hash is returned. If there are no <tt>Solid</tt> tips available, then * <tt>getRandomNonSolidTipHash</tt> is called and returned instead. * * @return A random <tt>Solid</tt> tip if available, a random <tt>NonSolid</tt> tip if not */ public Hash getRandomSolidTipHash() { synchronized (sync) { int size = solidTips.size(); if (size == 0) { return getRandomNonSolidTipHash(); } int index = seed.nextInt(size); Iterator<Hash> hashIterator; hashIterator = solidTips.iterator(); Hash hash = null; while (index-- >= 0 && hashIterator.hasNext()) { hash = hashIterator.next(); } return hash; //return solidTips.size() != 0 ? solidTips.get(seed.nextInt(solidTips.size())) : getRandomNonSolidTipHash(); } } /** * Returns a random tip by generating integer within the range of the <tt>Tips</tt> set, and iterates through the * set until a hash is returned. If there are no tips available, then null is returned instead. * * @return A random tip if available, null if not */ public Hash getRandomNonSolidTipHash() { synchronized (sync) { int size = tips.size(); if (size == 0) { return null; } int index = seed.nextInt(size); Iterator<Hash> hashIterator; hashIterator = tips.iterator(); Hash hash = null; while (index-- >= 0 && hashIterator.hasNext()) { hash = hashIterator.next(); } return hash; //return tips.size() != 0 ? 
tips.get(seed.nextInt(tips.size())) : null; } } /** * Fetches the size of the <tt>Tips</tt> set in a synchronised fashion * @return The size of the set */ public int nonSolidSize() { synchronized (sync) { return tips.size(); } } /** * Fetches the size of the <tt>SolidTips</tt> set in a synchronised fashion * @return The size of the set */ public int solidSize() { synchronized (sync) { return solidTips.size(); } } /** * Fetches the size of the <tt>Tips</tt> set and <tt>SolidTips</tt>set combined. This does so in a synchronised * fashion. * @return The size of both sets combined */ public int size() { synchronized (sync) { return tips.size() + solidTips.size(); } } /** * A First In First Out hash set for storing <tt>Tip</tt> transactions. * * @param <K> The class of object that will be stored in the hash set */ private class FifoHashCache<K> { private final int capacity; private final LinkedHashSet<K> set; /** * Constructor for a <tt>Fifo LinkedHashSet Hash</tt> set of a given size. * * @param capacity The maximum size allocated for the set */ public FifoHashCache(int capacity) { this.capacity = capacity; this.set = new LinkedHashSet<>(); } /** * Determines if there is vacant space available in the set, and adds the provided object to the set if so. If * there is no space available, the set removes the next available object iteratively until there is room * available. * * @param key The {@link Hash} identifier for the object that will be added to the set * @return True if the new objects have been added, False if not */ public boolean add(K key) { int vacancy = this.capacity - this.set.size(); if (vacancy <= 0) { Iterator<K> it = this.set.iterator(); for (int i = vacancy; i <= 0; i++) { it.next(); it.remove(); } } return this.set.add(key); } /** * Removes the referenced object from the set. 
* * @param key The {@link Hash} identifier for the object that will be removed from the set * @return True if the object is removed, False if not */ public boolean remove(K key) { return this.set.remove(key); } /**@return The integer size of the stored {@link Hash} set*/ public int size() { return this.set.size(); } /** * Creates a new iterator for the object set based on the {@link Hash} class of the set. * @return The class matched iterator for the stored set*/ public Iterator<K> iterator() { return this.set.iterator(); } } }
7,885
32.415254
131
java
iri
iri-master/src/main/java/com/iota/iri/controllers/TransactionViewModel.java
package com.iota.iri.controllers;

import com.iota.iri.model.*;
import com.iota.iri.model.persistables.*;
import com.iota.iri.service.snapshot.Snapshot;
import com.iota.iri.service.validation.TransactionValidator;
import com.iota.iri.storage.Indexable;
import com.iota.iri.storage.Persistable;
import com.iota.iri.storage.Tangle;
import com.iota.iri.utils.Converter;
import com.iota.iri.utils.Pair;

import java.util.*;

/**
 * Controller class for {@link Transaction} sets. A {@link TransactionViewModel} stores a {@link HashesViewModel} for
 * each component of the {@link Transaction} within it.
 */
public class TransactionViewModel {

    private final Transaction transaction;

    /** Length of a serialized transaction object in bytes (8019 trits packed 5-per-byte). */
    public static final int SIZE = 1604;
    private static final int TAG_SIZE_IN_BYTES = 17; // = ceil(81 TRITS / 5 TRITS_PER_BYTE)

    /** Total supply of IOTA available in the network. Used for ensuring a balanced ledger state and bundle balances */
    public static final long SUPPLY = 2779530283277761L; // = (3^33 - 1) / 2

    /** The predefined offset position and size (in trits) for the varying components of a transaction object */
    public static final int SIGNATURE_MESSAGE_FRAGMENT_TRINARY_OFFSET = 0,
            SIGNATURE_MESSAGE_FRAGMENT_TRINARY_SIZE = 6561;
    public static final int ADDRESS_TRINARY_OFFSET =
            SIGNATURE_MESSAGE_FRAGMENT_TRINARY_OFFSET + SIGNATURE_MESSAGE_FRAGMENT_TRINARY_SIZE,
            ADDRESS_TRINARY_SIZE = 243;
    public static final int VALUE_TRINARY_OFFSET = ADDRESS_TRINARY_OFFSET + ADDRESS_TRINARY_SIZE,
            VALUE_TRINARY_SIZE = 81, VALUE_USABLE_TRINARY_SIZE = 33;
    public static final int OBSOLETE_TAG_TRINARY_OFFSET = VALUE_TRINARY_OFFSET + VALUE_TRINARY_SIZE,
            OBSOLETE_TAG_TRINARY_SIZE = 81;
    public static final int TIMESTAMP_TRINARY_OFFSET = OBSOLETE_TAG_TRINARY_OFFSET + OBSOLETE_TAG_TRINARY_SIZE,
            TIMESTAMP_TRINARY_SIZE = 27;
    public static final int CURRENT_INDEX_TRINARY_OFFSET = TIMESTAMP_TRINARY_OFFSET + TIMESTAMP_TRINARY_SIZE,
            CURRENT_INDEX_TRINARY_SIZE = 27;
    public static final int LAST_INDEX_TRINARY_OFFSET = CURRENT_INDEX_TRINARY_OFFSET + CURRENT_INDEX_TRINARY_SIZE,
            LAST_INDEX_TRINARY_SIZE = 27;
    public static final int BUNDLE_TRINARY_OFFSET = LAST_INDEX_TRINARY_OFFSET + LAST_INDEX_TRINARY_SIZE,
            BUNDLE_TRINARY_SIZE = 243;
    public static final int TRUNK_TRANSACTION_TRINARY_OFFSET = BUNDLE_TRINARY_OFFSET + BUNDLE_TRINARY_SIZE,
            TRUNK_TRANSACTION_TRINARY_SIZE = 243;
    public static final int BRANCH_TRANSACTION_TRINARY_OFFSET =
            TRUNK_TRANSACTION_TRINARY_OFFSET + TRUNK_TRANSACTION_TRINARY_SIZE,
            BRANCH_TRANSACTION_TRINARY_SIZE = 243;
    public static final int TAG_TRINARY_OFFSET = BRANCH_TRANSACTION_TRINARY_OFFSET + BRANCH_TRANSACTION_TRINARY_SIZE,
            TAG_TRINARY_SIZE = 81;
    public static final int ATTACHMENT_TIMESTAMP_TRINARY_OFFSET = TAG_TRINARY_OFFSET + TAG_TRINARY_SIZE,
            ATTACHMENT_TIMESTAMP_TRINARY_SIZE = 27;
    public static final int ATTACHMENT_TIMESTAMP_LOWER_BOUND_TRINARY_OFFSET =
            ATTACHMENT_TIMESTAMP_TRINARY_OFFSET + ATTACHMENT_TIMESTAMP_TRINARY_SIZE,
            ATTACHMENT_TIMESTAMP_LOWER_BOUND_TRINARY_SIZE = 27;
    public static final int ATTACHMENT_TIMESTAMP_UPPER_BOUND_TRINARY_OFFSET =
            ATTACHMENT_TIMESTAMP_LOWER_BOUND_TRINARY_OFFSET + ATTACHMENT_TIMESTAMP_LOWER_BOUND_TRINARY_SIZE,
            ATTACHMENT_TIMESTAMP_UPPER_BOUND_TRINARY_SIZE = 27;
    private static final int NONCE_TRINARY_OFFSET =
            ATTACHMENT_TIMESTAMP_UPPER_BOUND_TRINARY_OFFSET + ATTACHMENT_TIMESTAMP_UPPER_BOUND_TRINARY_SIZE,
            NONCE_TRINARY_SIZE = 81;

    /** Total length of a transaction in trits (= 8019). */
    public static final int TRINARY_SIZE = NONCE_TRINARY_OFFSET + NONCE_TRINARY_SIZE;
    /** Total length of a transaction in trytes (= 2673). */
    public static final int TRYTES_SIZE = TRINARY_SIZE / 3;

    public static final int ESSENCE_TRINARY_OFFSET = ADDRESS_TRINARY_OFFSET,
            ESSENCE_TRINARY_SIZE = ADDRESS_TRINARY_SIZE + VALUE_TRINARY_SIZE + OBSOLETE_TAG_TRINARY_SIZE
                    + TIMESTAMP_TRINARY_SIZE + CURRENT_INDEX_TRINARY_SIZE + LAST_INDEX_TRINARY_SIZE;

    /** Stores the {@link HashesViewModel} for the {@link Transaction} components here */
    private AddressViewModel address;
    private ApproveeViewModel approvers;
    private TransactionViewModel trunk;
    private TransactionViewModel branch;
    private final Hash hash;

    /** Transaction Types */
    public static final int GROUP = 0; // transactions GROUP means that's it's a non-leaf node (leafs store transaction
                                       // value)
    public static final int PREFILLED_SLOT = 1; // means that we know only hash of the tx, the rest is unknown yet: only
                                                // another tx references that hash
    public static final int FILLED_SLOT = -1; // knows the hash only coz another tx references that hash

    private byte[] trits;
    public int weightMagnitude;

    /**
     * Populates the metadata of the {@link TransactionViewModel}. If the {@link Transaction} object has not been
     * parsed yet and the {@link TransactionViewModel} type is <tt>FILLED_SLOT</tt>, the metadata batch is saved to
     * the database.
     *
     * @param tangle The tangle reference for the database.
     * @param transactionViewModel The {@link TransactionViewModel} whose metadata is to be filled.
     * @throws Exception Thrown if the database fails to save the batch of data.
     */
    public static void fillMetadata(Tangle tangle, TransactionViewModel transactionViewModel) throws Exception {
        if (transactionViewModel.getType() == FILLED_SLOT && !transactionViewModel.transaction.parsed.get()) {
            tangle.saveBatch(transactionViewModel.getMetadataSaveBatch());
        }
    }

    /**
     * Creates a new controller using a byte array that will be converted to a {@link Hash} identifier. The new
     * {@link TransactionViewModel} is created for the {@link Transaction} object referenced by this {@link Hash}
     * identifier, provided the {@link Transaction} exists in the database.
     *
     * @param tangle The tangle reference for the database
     * @param hash The source that the {@link Hash} identifier will be created from, and the {@link Transaction}
     *             object will be fetched from the database from
     * @return The {@link TransactionViewModel} with its metadata filled in.
     * @throws Exception Thrown if the database fails to find the {@link Transaction} object
     */
    public static TransactionViewModel find(Tangle tangle, byte[] hash) throws Exception {
        TransactionViewModel transactionViewModel = new TransactionViewModel(
                (Transaction) tangle.find(Transaction.class, hash), HashFactory.TRANSACTION.create(hash));
        fillMetadata(tangle, transactionViewModel);
        return transactionViewModel;
    }

    /**
     * Creates a new controller for a {@link Transaction} set referenced by a given {@link Hash} identifier. The
     * controller will be created and its metadata filled provided the {@link Transaction} object exists in the
     * database.
     *
     * @param tangle The tangle reference for the database
     * @param hash The {@link Hash} identifier to search with
     * @return The {@link TransactionViewModel} with its metadata filled in.
     * @throws Exception Thrown if there is an error loading the {@link Transaction} object from the database
     */
    public static TransactionViewModel fromHash(Tangle tangle, final Hash hash) throws Exception {
        TransactionViewModel transactionViewModel = new TransactionViewModel(
                (Transaction) tangle.load(Transaction.class, hash), hash);
        fillMetadata(tangle, transactionViewModel);
        return transactionViewModel;
    }

    /**
     * Constructor for a {@link Transaction} set controller interface. This controller is used to interact with and
     * manipulate a provided {@link Transaction} set.
     *
     * @param transaction {@link Transaction} set that the {@link TransactionViewModel} will be created for
     * @param hash The {@link Hash} identifier of the {@link Transaction} set
     */
    public TransactionViewModel(final Transaction transaction, Hash hash) {
        this.transaction = transaction == null || transaction.bytes == null ? new Transaction() : transaction;
        this.hash = hash == null ? Hash.NULL_HASH : hash;
        weightMagnitude = this.hash.trailingZeros();
    }

    /**
     * Constructor for a {@link Transaction} set controller interface. A new {@link Transaction} set is created from
     * the provided trit array, if the array is of the correct size. If it is not the correct size, a new byte array of
     * the correct size is created, the trit array is copied into it, and the bytes are stored in the new
     * {@link Transaction} set. This {@link Transaction} set is then indexed by the provided {@link Hash} identifier.
     *
     * @param trits The input trits that the {@link Transaction} and {@link TransactionViewModel} will be created from.
     * @param hash The {@link TransactionHash} identifier of the {@link Transaction} set
     */
    public TransactionViewModel(final byte[] trits, Hash hash) {
        transaction = new Transaction();

        if (trits.length == TRINARY_SIZE) { // full trit-encoded transaction
            this.trits = new byte[trits.length];
            System.arraycopy(trits, 0, this.trits, 0, trits.length);

            transaction.bytes = Converter.allocateBytesForTrits(trits.length);
            Converter.bytes(trits, 0, transaction.bytes, 0, trits.length);

            transaction.validity = 0;
            transaction.arrivalTime = 0;
        } else { // already byte-encoded; copy the serialized form directly
            transaction.bytes = new byte[SIZE];
            System.arraycopy(trits, 0, transaction.bytes, 0, SIZE);
        }

        this.hash = hash;
        weightMagnitude = this.hash.trailingZeros();
        transaction.type.set(FILLED_SLOT);
    }

    /**
     * This method checks the {@link com.iota.iri.storage.rocksDB.RocksDBPersistenceProvider} to determine if the
     * {@link Transaction} object might exist in the database. If it definitively does not exist, it will return False.
     *
     * @param tangle The tangle reference for the database
     * @param hash The {@link Hash} identifier of the object you are looking for
     * @return True if the key might exist in the database, False if it definitively does not
     * @throws Exception Thrown if there is an error checking the database
     */
    public static boolean mightExist(Tangle tangle, Hash hash) throws Exception {
        return tangle.maybeHas(Transaction.class, hash);
    }

    /**
     * Determines whether the {@link Transaction} object exists in the database or not.
     *
     * @param tangle The tangle reference for the database.
     * @param hash The {@link Hash} identifier for the {@link Transaction} object
     * @return True if the transaction exists in the database, False if not
     * @throws Exception Thrown if there is an error determining if the transaction exists or not
     */
    public static boolean exists(Tangle tangle, Hash hash) throws Exception {
        return tangle.exists(Transaction.class, hash);
    }

    /**
     * Returns the total number of {@link Transaction} objects stored in a database.
     *
     * @param tangle The tangle reference for the database.
     * @return The integer count of total {@link Transaction} objects in the database
     * @throws Exception Thrown if there is an error getting the count of objects
     */
    public static int getNumberOfStoredTransactions(Tangle tangle) throws Exception {
        return tangle.getCount(Transaction.class).intValue();
    }

    /**
     * Converts the given byte array to a new trit array of length {@value TRINARY_SIZE}.
     *
     * @param transactionBytes The byte array to be converted to trits
     * @return The trit conversion of the byte array
     */
    public static byte[] trits(byte[] transactionBytes) {
        byte[] trits = new byte[TRINARY_SIZE];
        if (transactionBytes != null) {
            Converter.getTrits(transactionBytes, trits);
        }
        return trits;
    }

    public static Set<Indexable> getMissingTransactions(Tangle tangle) throws Exception {
        return tangle.keysWithMissingReferences(Approvee.class, Transaction.class);
    }

    /**
     * Fetches the first persistable {@link Transaction} object from the database and generates a new
     * {@link TransactionViewModel} from it. If no objects exist in the database, it will return null.
     *
     * @param tangle the tangle reference for the database.
     * @return The new {@link TransactionViewModel}.
     * @throws Exception Thrown if the database fails to return a first object.
     */
    public static TransactionViewModel first(Tangle tangle) throws Exception {
        Pair<Indexable, Persistable> transactionPair = tangle.getFirst(Transaction.class, TransactionHash.class);
        if (transactionPair != null && transactionPair.hi != null) {
            return new TransactionViewModel((Transaction) transactionPair.hi, (Hash) transactionPair.low);
        }
        return null;
    }

    /**
     * This method updates the metadata contained in the {@link Transaction} object, and updates the object in the
     * database. First, all the most recent {@link Hash} identifiers are fetched to make sure the object's metadata is
     * up to date. If the transaction is a solid entry point of the initial snapshot, no database update is issued.
     *
     * @param tangle The tangle reference for the database
     * @param initialSnapshot snapshot that acts as genesis
     * @param item The string identifying the purpose of the update
     * @throws Exception Thrown if any of the metadata fails to fetch, or if the database update fails
     */
    public void update(Tangle tangle, Snapshot initialSnapshot, String item) throws Exception {
        getAddressHash();
        getTrunkTransactionHash();
        getBranchTransactionHash();
        getBundleHash();
        getTagValue();
        getObsoleteTagValue();
        setAttachmentData();
        setMetadata();
        if (initialSnapshot.hasSolidEntryPoint(hash)) {
            return;
        }
        tangle.update(transaction, hash, item);
    }

    /**
     * Retrieves the {@link TransactionViewModel} for the branch {@link Transaction} object referenced by this
     * {@link TransactionViewModel}. If the controller doesn't already exist, a new one is created from the branch
     * transaction {@link Hash} identifier.
     *
     * @param tangle The tangle reference for the database.
     * @return The branch transaction {@link TransactionViewModel}
     * @throws Exception Thrown if no branch is found when creating the branch {@link TransactionViewModel}
     */
    public TransactionViewModel getBranchTransaction(Tangle tangle) throws Exception {
        if (branch == null) {
            branch = TransactionViewModel.fromHash(tangle, getBranchTransactionHash());
        }
        return branch;
    }

    /**
     * Retrieves the {@link TransactionViewModel} for the trunk {@link Transaction} object referenced by this
     * {@link TransactionViewModel}. If the controller doesn't already exist, a new one is created from the trunk
     * transaction {@link Hash} identifier.
     *
     * @param tangle The tangle reference for the database.
     * @return The trunk transaction {@link TransactionViewModel}
     * @throws Exception Thrown if no trunk is found when creating the trunk {@link TransactionViewModel}
     */
    public TransactionViewModel getTrunkTransaction(Tangle tangle) throws Exception {
        if (trunk == null) {
            trunk = TransactionViewModel.fromHash(tangle, getTrunkTransactionHash());
        }
        return trunk;
    }

    /**
     * @return The trits stored in the {@link TransactionViewModel}. If the trits aren't available in the controller,
     *         the trits are generated from the transaction object and cached in the controller.
     */
    public synchronized byte[] trits() {
        return (trits == null) ? (trits = trits(transaction.bytes)) : trits;
    }

    /**
     * Deletes the {@link Transaction} object from the database.
     *
     * @param tangle The tangle reference for the database
     * @throws Exception Thrown if there is an error removing the object
     */
    public void delete(Tangle tangle) throws Exception {
        tangle.delete(Transaction.class, hash);
    }

    /**
     * Stores the {@link Transaction} object to the tangle, including the metadata and indexing based on
     * {@link Bundle}, {@link Address}, {@link Tag}, {@link #trunk} and {@link #branch}.
     *
     * @return The list of {@link Hash} objects indexed by the {@link TransactionHash} identifier.
     */
    public List<Pair<Indexable, Persistable>> getMetadataSaveBatch() {
        List<Pair<Indexable, Persistable>> hashesList = new ArrayList<>();
        hashesList.add(new Pair<>(getAddressHash(), new Address(hash)));
        hashesList.add(new Pair<>(getBundleHash(), new Bundle(hash)));
        hashesList.add(new Pair<>(getBranchTransactionHash(), new Approvee(hash)));
        hashesList.add(new Pair<>(getTrunkTransactionHash(), new Approvee(hash)));
        hashesList.add(new Pair<>(getObsoleteTagValue(), new ObsoleteTag(hash)));
        hashesList.add(new Pair<>(getTagValue(), new Tag(hash)));
        setAttachmentData();
        setMetadata();
        return hashesList;
    }

    /**
     * Fetches a list of all {@link Transaction} component and {@link Hash} identifier pairs from the stored metadata.
     * The method then ensures that the {@link Transaction#bytes} are present before adding the {@link Transaction} and
     * {@link Hash} identifier to the already compiled list of {@link Transaction} components.
     *
     * @return A complete list of all {@link Transaction} component objects paired with their {@link Hash} identifiers.
     *         The transaction object itself must be the last item in the list.
     * @throws Exception Thrown if the metadata fails to fetch, or if the bytes are not retrieved correctly
     */
    public List<Pair<Indexable, Persistable>> getSaveBatch() throws Exception {
        List<Pair<Indexable, Persistable>> hashesList = new ArrayList<>();
        hashesList.addAll(getMetadataSaveBatch());
        getBytes();
        // must be last
        hashesList.add(new Pair<>(hash, transaction));
        return hashesList;
    }

    /**
     * Fetches the next indexed persistable {@link Transaction} object from the database and generates a new
     * {@link TransactionViewModel} from it. If no objects exist in the database, it will return null.
     *
     * @param tangle the tangle reference for the database.
     * @return The new {@link TransactionViewModel}.
     * @throws Exception Thrown if the database fails to return a next object.
     */
    public TransactionViewModel next(Tangle tangle) throws Exception {
        Pair<Indexable, Persistable> transactionPair = tangle.next(Transaction.class, hash);
        if (transactionPair != null && transactionPair.hi != null) {
            return new TransactionViewModel((Transaction) transactionPair.hi, (Hash) transactionPair.low);
        }
        return null;
    }

    /**
     * This method fetches the saved batch of metadata and orders them into a list of {@link Hash} objects and
     * {@link Hash} identifier pairs. If the transaction is a solid entry point of the initial snapshot, or the
     * database already contains the {@link Transaction}, then the method returns False. Otherwise, the method tries to
     * store the {@link Transaction} batch into the database.
     *
     * @param tangle The tangle reference for the database.
     * @param initialSnapshot snapshot that acts as genesis
     * @return True if the {@link Transaction} is stored, False if not.
     * @throws Exception Thrown if there is an error fetching the batch or storing in the database.
     */
    public boolean store(Tangle tangle, Snapshot initialSnapshot) throws Exception {
        if (initialSnapshot.hasSolidEntryPoint(hash) || exists(tangle, hash)) {
            return false;
        }

        List<Pair<Indexable, Persistable>> batch = getSaveBatch();
        // re-check: another thread may have stored it while the batch was built
        if (exists(tangle, hash)) {
            return false;
        }
        return tangle.saveBatch(batch);
    }

    /**
     * Creates a copy of the underlying {@link Transaction} object.
     *
     * @return the transaction object
     */
    public Transaction getTransaction() {
        Transaction t = new Transaction();
        // if the supplied array to the call != null the transaction bytes are copied over from the buffer.
        t.read(getBytes());
        t.readMetadata(transaction.metadata());
        return t;
    }

    /**
     * Gets the {@link ApproveeViewModel} of a {@link Transaction}. If the current {@link ApproveeViewModel} is null, a
     * new one is created using the transaction {@link Hash} identifier.
     *
     * An {@link Approvee} is a transaction in the tangle that references, and therefore approves, this transaction
     * directly.
     *
     * @param tangle The tangle reference for the database
     * @return The {@link ApproveeViewModel}
     * @throws Exception Thrown if there is a failure to create a controller from the transaction hash
     */
    public ApproveeViewModel getApprovers(Tangle tangle) throws Exception {
        if (approvers == null) {
            approvers = ApproveeViewModel.load(tangle, hash);
        }
        return approvers;
    }

    /**
     * Gets the {@link Transaction#type}. The type can be one of 3:
     * <ul>
     * <li>PREFILLED_SLOT: 1</li>
     * <li>FILLED_SLOT: -1</li>
     * <li>GROUP: 0</li>
     * </ul>
     *
     * @return The current type of the transaction.
     */
    public final int getType() {
        return transaction.type.get();
    }

    /**
     * Sets the {@link Transaction#arrivalTime}.
     *
     * @param time The time to be set in the {@link Transaction}
     */
    public void setArrivalTime(long time) {
        transaction.arrivalTime = time;
    }

    /** @return The {@link Transaction#arrivalTime} */
    public long getArrivalTime() {
        return transaction.arrivalTime;
    }

    /**
     * Gets the stored {@link Transaction#bytes}. If the {@link Transaction#bytes} are null, a new byte array is
     * created and stored from the {@link #trits}. If the {@link #trits} are also null, then a zeroed byte array is
     * returned.
     *
     * @return The stored {@link Transaction#bytes} array
     */
    public byte[] getBytes() {
        if (transaction.bytes == null || transaction.bytes.length != SIZE) {
            transaction.bytes = new byte[SIZE];
            if (trits != null) {
                Converter.bytes(trits(), 0, transaction.bytes, 0, trits().length);
            }
        }
        return transaction.bytes;
    }

    /** @return The transaction {@link Hash} identifier */
    public Hash getHash() {
        return hash;
    }

    /**
     * Gets the {@link AddressViewModel} associated with this {@link Transaction}.
     *
     * @param tangle The tangle reference for the database.
     * @return The {@link AddressViewModel} of the {@link Transaction}.
     * @throws Exception If the address cannot be found in the database, an exception is thrown.
     */
    public AddressViewModel getAddress(Tangle tangle) throws Exception {
        if (address == null) {
            address = AddressViewModel.load(tangle, getAddressHash());
        }
        return address;
    }

    /**
     * Gets the {@link TagViewModel} associated with this {@link Transaction}.
     *
     * @param tangle The tangle reference for the database.
     * @return The {@link TagViewModel} of the {@link Transaction}.
     * @throws Exception If the tag cannot be found in the database, an exception is thrown.
     */
    public TagViewModel getTag(Tangle tangle) throws Exception {
        return TagViewModel.load(tangle, getTagValue());
    }

    /**
     * Gets the {@link AddressHash} identifier of a {@link Transaction}.
     *
     * @return The {@link AddressHash} identifier.
     */
    public Hash getAddressHash() {
        if (transaction.address == null) {
            transaction.address = HashFactory.ADDRESS.create(trits(), ADDRESS_TRINARY_OFFSET);
        }
        return transaction.address;
    }

    /**
     * Gets the {@link ObsoleteTagHash} identifier of a {@link Transaction}.
     *
     * @return The {@link ObsoleteTagHash} identifier.
     */
    public Hash getObsoleteTagValue() {
        if (transaction.obsoleteTag == null) {
            byte[] tagBytes = Converter.allocateBytesForTrits(OBSOLETE_TAG_TRINARY_SIZE);
            Converter.bytes(trits(), OBSOLETE_TAG_TRINARY_OFFSET, tagBytes, 0, OBSOLETE_TAG_TRINARY_SIZE);

            transaction.obsoleteTag = HashFactory.OBSOLETETAG.create(tagBytes, 0, TAG_SIZE_IN_BYTES);
        }
        return transaction.obsoleteTag;
    }

    /**
     * Gets the {@link BundleHash} identifier of a {@link Transaction}.
     *
     * @return The {@link BundleHash} identifier.
     */
    public Hash getBundleHash() {
        if (transaction.bundle == null) {
            transaction.bundle = HashFactory.BUNDLE.create(trits(), BUNDLE_TRINARY_OFFSET);
        }
        return transaction.bundle;
    }

    /**
     * Gets the trunk {@link TransactionHash} identifier of a {@link Transaction}.
     *
     * @return The trunk {@link TransactionHash} identifier.
     */
    public Hash getTrunkTransactionHash() {
        if (transaction.trunk == null) {
            transaction.trunk = HashFactory.TRANSACTION.create(trits(), TRUNK_TRANSACTION_TRINARY_OFFSET);
        }
        return transaction.trunk;
    }

    /**
     * Gets the branch {@link TransactionHash} identifier of a {@link Transaction}.
     *
     * @return The branch {@link TransactionHash} identifier.
     */
    public Hash getBranchTransactionHash() {
        if (transaction.branch == null) {
            transaction.branch = HashFactory.TRANSACTION.create(trits(), BRANCH_TRANSACTION_TRINARY_OFFSET);
        }
        return transaction.branch;
    }

    /**
     * Gets the {@link TagHash} identifier of a {@link Transaction}.
     *
     * @return The {@link TagHash} identifier.
     */
    public Hash getTagValue() {
        if (transaction.tag == null) {
            byte[] tagBytes = Converter.allocateBytesForTrits(TAG_TRINARY_SIZE);
            Converter.bytes(trits(), TAG_TRINARY_OFFSET, tagBytes, 0, TAG_TRINARY_SIZE);
            transaction.tag = HashFactory.TAG.create(tagBytes, 0, TAG_SIZE_IN_BYTES);
        }
        return transaction.tag;
    }

    /**
     * Gets the {@link Transaction#attachmentTimestamp}. The <tt>Attachment Timestamp</tt> is used to show when a
     * transaction has been attached to the database.
     *
     * @return The {@link Transaction#attachmentTimestamp}
     */
    public long getAttachmentTimestamp() {
        return transaction.attachmentTimestamp;
    }

    /**
     * Gets the {@link Transaction#attachmentTimestampLowerBound}. The <tt>Attachment Timestamp Lower Bound</tt> is the
     * earliest timestamp a transaction can have.
     *
     * @return The {@link Transaction#attachmentTimestampLowerBound}
     */
    public long getAttachmentTimestampLowerBound() {
        return transaction.attachmentTimestampLowerBound;
    }

    /**
     * Gets the {@link Transaction#attachmentTimestampUpperBound}. The <tt>Attachment Timestamp Upper Bound</tt> is the
     * maximum timestamp a transaction can have.
     *
     * @return The {@link Transaction#attachmentTimestampUpperBound}
     */
    public long getAttachmentTimestampUpperBound() {
        return transaction.attachmentTimestampUpperBound;
    }

    /** @return The {@link Transaction#value} */
    public long value() {
        return transaction.value;
    }

    /**
     * Updates the {@link Transaction#validity} in the database.
     *
     * The validity can be one of three states: <tt>1: Valid; -1: Invalid; 0: Unknown</tt>
     *
     * @param tangle The tangle reference for the database
     * @param initialSnapshot snapshot that acts as genesis
     * @param validity The state of validity that the {@link Transaction} will be updated to
     * @throws Exception Thrown if there is an error with the update
     */
    public void setValidity(Tangle tangle, Snapshot initialSnapshot, int validity) throws Exception {
        if (transaction.validity != validity) {
            transaction.validity = validity;
            update(tangle, initialSnapshot, "validity");
        }
    }

    /** @return The current stored {@link Transaction#validity} */
    public int getValidity() {
        return transaction.validity;
    }

    /** @return The {@link Transaction#currentIndex} in its bundle */
    public long getCurrentIndex() {
        return transaction.currentIndex;
    }

    /**
     * Creates an array copy of the signature message fragment of the {@link Transaction} and returns it.
     *
     * @return The signature message fragment in array format.
     */
    public byte[] getSignature() {
        // copyOfRange takes (from, to); the upper bound must be offset + size, not just size.
        return Arrays.copyOfRange(trits(), SIGNATURE_MESSAGE_FRAGMENT_TRINARY_OFFSET,
                SIGNATURE_MESSAGE_FRAGMENT_TRINARY_OFFSET + SIGNATURE_MESSAGE_FRAGMENT_TRINARY_SIZE);
    }

    /** @return The stored {@link Transaction#timestamp} */
    public long getTimestamp() {
        return transaction.timestamp;
    }

    /**
     * Creates a byte array sized for {@value NONCE_TRINARY_SIZE} trits and copies the nonce section of the transaction
     * trits into it.
     *
     * @return A byte array containing the nonce of the transaction
     */
    public byte[] getNonce() {
        byte[] nonce = Converter.allocateBytesForTrits(NONCE_TRINARY_SIZE);
        // Only the nonce's NONCE_TRINARY_SIZE trits belong in the destination; passing the full
        // transaction length here would read/write past the intended ranges.
        Converter.bytes(trits(), NONCE_TRINARY_OFFSET, nonce, 0, NONCE_TRINARY_SIZE);
        return nonce;
    }

    /** @return The {@link Transaction#lastIndex} of the transaction bundle */
    public long lastIndex() {
        return transaction.lastIndex;
    }

    /**
     * Fetches the {@link Transaction#tag}, and converts the transaction trits for the
     * {@link Transaction#attachmentTimestamp}, the {@link Transaction#attachmentTimestampLowerBound}, and the
     * {@link Transaction#attachmentTimestampUpperBound} to long values. The method then sets these values to the
     * {@link TransactionViewModel} metadata.
     */
    public void setAttachmentData() {
        getTagValue();
        transaction.attachmentTimestamp = Converter.longValue(trits(), ATTACHMENT_TIMESTAMP_TRINARY_OFFSET,
                ATTACHMENT_TIMESTAMP_TRINARY_SIZE);
        transaction.attachmentTimestampLowerBound = Converter.longValue(trits(),
                ATTACHMENT_TIMESTAMP_LOWER_BOUND_TRINARY_OFFSET, ATTACHMENT_TIMESTAMP_LOWER_BOUND_TRINARY_SIZE);
        transaction.attachmentTimestampUpperBound = Converter.longValue(trits(),
                ATTACHMENT_TIMESTAMP_UPPER_BOUND_TRINARY_OFFSET, ATTACHMENT_TIMESTAMP_UPPER_BOUND_TRINARY_SIZE);
    }

    /**
     * Converts the {@link Transaction#value}, {@link Transaction#timestamp}, {@link Transaction#currentIndex} and
     * {@link Transaction#lastIndex} from trits to long values and assigns them to the {@link TransactionViewModel}
     * metadata. The method then determines if the {@link Transaction#bytes} are null or not. If so the
     * {@link Transaction#type} is set to {@link #PREFILLED_SLOT}, and if not it is set to {@link #FILLED_SLOT}.
     */
    public void setMetadata() {
        transaction.value = Converter.longValue(trits(), VALUE_TRINARY_OFFSET, VALUE_USABLE_TRINARY_SIZE);
        transaction.timestamp = Converter.longValue(trits(), TIMESTAMP_TRINARY_OFFSET, TIMESTAMP_TRINARY_SIZE);
        // if (transaction.timestamp > 1262304000000L ) transaction.timestamp /= 1000L; // if > 01.01.2010 in
        // milliseconds
        transaction.currentIndex = Converter.longValue(trits(), CURRENT_INDEX_TRINARY_OFFSET,
                CURRENT_INDEX_TRINARY_SIZE);
        transaction.lastIndex = Converter.longValue(trits(), LAST_INDEX_TRINARY_OFFSET, LAST_INDEX_TRINARY_SIZE);
        transaction.type.set(transaction.bytes == null ? TransactionViewModel.PREFILLED_SLOT
                : TransactionViewModel.FILLED_SLOT);
    }

    /**
     * Updates the {@link Transaction#solid} value of the referenced {@link Transaction} object.
     *
     * Used by the {@link TransactionValidator} to quickly set the solidity of a {@link Transaction} set.
     *
     * @param solid The solidity of the transaction in the database
     * @return True if the {@link Transaction#solid} has been updated, False if not.
     */
    public boolean updateSolid(boolean solid) throws Exception {
        if (solid != transaction.solid.get()) {
            transaction.solid.set(solid);
            return true;
        }
        return false;
    }

    /** @return True if {@link Transaction#solid} is True (exists in the database), False if not */
    public boolean isSolid() {
        return transaction.solid.get();
    }

    /** @return The {@link Transaction#snapshot} index */
    public int snapshotIndex() {
        return transaction.snapshot.get();
    }

    /**
     * Sets the current {@link Transaction#snapshot} index.
     *
     * This is used to set a milestone transactions index.
     *
     * @param tangle The tangle reference for the database.
     * @param initialSnapshot snapshot that acts as genesis
     * @param index The new index to be attached to the {@link Transaction} object
     * @throws Exception Thrown if the database update does not return correctly
     */
    public void setSnapshot(Tangle tangle, Snapshot initialSnapshot, final int index) throws Exception {
        if (index != transaction.snapshot.get()) {
            transaction.snapshot.set(index);
            update(tangle, initialSnapshot, "snapshot");
        }
    }

    /**
     * This method sets the {@link Transaction#milestone} flag.
     *
     * It gets automatically called by the {@link com.iota.iri.service.milestone.MilestoneSolidifier} and marks
     * transactions that represent a milestone accordingly. It first checks if the {@link Transaction#milestone} flag
     * has changed and if so, it issues a database update.
     *
     * @param tangle Tangle instance which acts as a database interface
     * @param initialSnapshot the snapshot representing the starting point of our ledger
     * @param isMilestone true if the transaction is a milestone and false otherwise
     * @throws Exception if something goes wrong while saving the changes to the database
     */
    public void isMilestone(Tangle tangle, Snapshot initialSnapshot, final boolean isMilestone) throws Exception {
        if (isMilestone != transaction.milestone.get()) {
            transaction.milestone.set(isMilestone);
            update(tangle, initialSnapshot, "milestone");
        }
    }

    /**
     * This method gets the {@link Transaction#milestone} flag.
     *
     * The {@link Transaction#milestone} flag indicates if the {@link Transaction} is a coordinator issued milestone.
     * It allows us to differentiate the two types of transactions (normal transactions / milestones) very fast and
     * efficiently without issuing further database queries or even full verifications of the signature. If it is set
     * to true one can for example use the snapshotIndex() method to retrieve the corresponding
     * {@link MilestoneViewModel} object.
     *
     * @return true if the {@link Transaction} is a milestone and false otherwise
     */
    public boolean isMilestone() {
        return transaction.milestone.get();
    }

    /** @return The current {@link Transaction#height} */
    public long getHeight() {
        return transaction.height.get();
    }

    /**
     * Updates the {@link Transaction#height}.
     *
     * @param height The new height of the {@link Transaction}
     */
    private void updateHeight(long height) throws Exception {
        transaction.height.set(height);
    }

    public void updateHeights(Tangle tangle, Snapshot initialSnapshot) throws Exception {
        TransactionViewModel transactionVM = this, trunk = this.getTrunkTransaction(tangle);
        Stack<Hash> transactionViewModels = new Stack<>();
        transactionViewModels.push(transactionVM.getHash());

        // walk down the trunk chain until a transaction with a known height, a prefilled slot,
        // or a solid entry point is reached
        while (trunk.getHeight() == 0 && trunk.getType() != PREFILLED_SLOT
                && !initialSnapshot.hasSolidEntryPoint(trunk.getHash())) {
            transactionVM = trunk;
            trunk = transactionVM.getTrunkTransaction(tangle);
            transactionViewModels.push(transactionVM.getHash());
        }

        // unwind the stack, assigning each transaction a height one greater than its trunk's
        while (transactionViewModels.size() != 0) {
            transactionVM = TransactionViewModel.fromHash(tangle, transactionViewModels.pop());
            long currentHeight = transactionVM.getHeight();
            if (initialSnapshot.hasSolidEntryPoint(trunk.getHash()) && trunk.getHeight() == 0
                    && !initialSnapshot.hasSolidEntryPoint(transactionVM.getHash())) {
                if (currentHeight != 1L) {
                    transactionVM.updateHeight(1L);
                    transactionVM.update(tangle, initialSnapshot, "height");
                }
            } else if (trunk.getType() != PREFILLED_SLOT && transactionVM.getHeight() == 0) {
                long newHeight = 1L + trunk.getHeight();
                if (currentHeight != newHeight) {
                    transactionVM.updateHeight(newHeight);
                    transactionVM.update(tangle, initialSnapshot, "height");
                }
            } else {
                break;
            }
            trunk = transactionVM;
        }
    }

    /**
     * Updates the {@link Transaction#sender}.
     *
     * @param sender The sender of the {@link Transaction}
     */
    public void updateSender(String sender) throws Exception {
        transaction.sender.set(sender);
    }

    /** @return The {@link Transaction#sender} */
    public String getSender() {
        return transaction.sender.get();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        TransactionViewModel other = (TransactionViewModel) o;
        return Objects.equals(getHash(), other.getHash());
    }

    @Override
    public int hashCode() {
        return Objects.hash(getHash());
    }

    /**
     * This method creates a human readable string representation of the transaction.
     *
     * It can be used to directly append the transaction in error and debug messages.
     *
     * @return human readable string representation of the transaction
     */
    @Override
    public String toString() {
        return "transaction " + hash.toString();
    }
}
39,549
43.689266
125
java
iri
iri-master/src/main/java/com/iota/iri/crypto/Curl.java
package com.iota.iri.crypto; import com.iota.iri.utils.Converter; import com.iota.iri.utils.Pair; import java.util.Arrays; import java.util.NoSuchElementException; /** * (c) 2016 Come-from-Beyond and Paul Handy * * Curl belongs to the sponge function family. * */ public class Curl implements Sponge { static final int NUMBER_OF_ROUNDSP81 = 81; static final int NUMBER_OF_ROUNDSP27 = 27; private final int numberOfRounds; private static final int STATE_LENGTH = 3 * HASH_LENGTH; private static final byte[] TRUTH_TABLE = {1, 0, -1, 2, 1, -1, 0, 2, -1, 1, 0}; private final byte[] state; private final long[] stateLow; private final long[] stateHigh; private final byte[] scratchpad = new byte[STATE_LENGTH]; protected Curl(SpongeFactory.Mode mode) { switch(mode) { case CURLP27: { numberOfRounds = NUMBER_OF_ROUNDSP27; } break; case CURLP81: { numberOfRounds = NUMBER_OF_ROUNDSP81; } break; default: throw new NoSuchElementException("Only Curl-P-27 and Curl-P-81 are supported."); } state = new byte[STATE_LENGTH]; stateHigh = null; stateLow = null; } @Override public void absorb(final byte[] trits, int offset, int length) { do { System.arraycopy(trits, offset, state, 0, length < HASH_LENGTH ? length : HASH_LENGTH); transform(); offset += HASH_LENGTH; } while ((length -= HASH_LENGTH) > 0); } @Override public void squeeze(final byte[] trits, int offset, int length) { do { System.arraycopy(state, 0, trits, offset, length < HASH_LENGTH ? length : HASH_LENGTH); transform(); offset += HASH_LENGTH; } while ((length -= HASH_LENGTH) > 0); } /** * Performs {@code numberOfRounds} Transformations on the internal state. 
*/ private void transform() { int scratchpadIndex = 0; int prevScratchpadIndex = 0; for (int round = 0; round < numberOfRounds; round++) { System.arraycopy(state, 0, scratchpad, 0, STATE_LENGTH); for (int stateIndex = 0; stateIndex < STATE_LENGTH; stateIndex++) { prevScratchpadIndex = scratchpadIndex; if (scratchpadIndex < 365) { scratchpadIndex += 364; } else { scratchpadIndex += -365; } state[stateIndex] = TRUTH_TABLE[scratchpad[prevScratchpadIndex] + (scratchpad[scratchpadIndex] << 2) + 5]; } } } public void reset() { Arrays.fill(state, (byte) 0); } // BCURLT - pair Curl implementation. ///////////////////////////////////////Code not in use//////////////////////////////////////////////////// protected Curl(boolean pair, SpongeFactory.Mode mode) { switch(mode) { case CURLP27: { numberOfRounds = NUMBER_OF_ROUNDSP27; } break; case CURLP81: { numberOfRounds = NUMBER_OF_ROUNDSP81; } break; default: throw new NoSuchElementException("Only Curl-P-27 and Curl-P-81 are supported."); } if(pair) { stateHigh = new long[STATE_LENGTH]; stateLow = new long[STATE_LENGTH]; state = null; set(); } else { state = new byte[STATE_LENGTH]; stateHigh = null; stateLow = null; } } void reset(boolean pair) { if(pair) { set(); } else { reset(); } } private void set() { Arrays.fill(stateLow, Converter.HIGH_LONG_BITS); Arrays.fill(stateHigh, Converter.HIGH_LONG_BITS); } private void pairTransform() { final long[] curlScratchpadLow = new long[STATE_LENGTH]; final long[] curlScratchpadHigh = new long[STATE_LENGTH]; int curlScratchpadIndex = 0; for (int round = numberOfRounds; round-- > 0; ) { System.arraycopy(stateLow, 0, curlScratchpadLow, 0, STATE_LENGTH); System.arraycopy(stateHigh, 0, curlScratchpadHigh, 0, STATE_LENGTH); for (int curlStateIndex = 0; curlStateIndex < STATE_LENGTH; curlStateIndex++) { final long alpha = curlScratchpadLow[curlScratchpadIndex]; final long beta = curlScratchpadHigh[curlScratchpadIndex]; final long delta = alpha & (curlScratchpadLow[curlScratchpadIndex += 
(curlScratchpadIndex < 365 ? 364 : -365)] ^ beta); stateLow[curlStateIndex] = ~delta; stateHigh[curlStateIndex] = (alpha ^ curlScratchpadHigh[curlScratchpadIndex]) | delta; } } } void absorb(final Pair<long[], long[]> pair, int offset, int length) { int o = offset, l = length, i = 0; do { System.arraycopy(pair.low, o, stateLow, 0, l < HASH_LENGTH ? l : HASH_LENGTH); System.arraycopy(pair.hi, o, stateHigh, 0, l < HASH_LENGTH ? l : HASH_LENGTH); pairTransform(); o += HASH_LENGTH; } while ((l -= HASH_LENGTH) > 0); } Pair<long[], long[]> squeeze(Pair<long[], long[]> pair, int offset, int length) { int o = offset, l = length, i = 0; long[] low = pair.low; long[] hi = pair.hi; do { System.arraycopy(stateLow, 0, low, o, l < HASH_LENGTH ? l : HASH_LENGTH); System.arraycopy(stateHigh, 0, hi, o, l < HASH_LENGTH ? l : HASH_LENGTH); pairTransform(); o += HASH_LENGTH; } while ((l -= HASH_LENGTH) > 0); return new Pair<>(low, hi); } }
5,778
32.994118
135
java
iri
iri-master/src/main/java/com/iota/iri/crypto/ISS.java
package com.iota.iri.crypto;

import java.util.Arrays;

import com.iota.iri.model.Hash;

/**
 * (c) 2016 Come-from-Beyond <br>
 *
 * IOTA Signature Scheme. <br>
 * Based on Winternitz One Time Signatures.<br>
 *
 * Popular usages:<br>
 * <b>Address(subseed)</b>
 * <pre>
 * key = key(subseed)
 * digests = digests(key)
 * return address(digests)
 * </pre>
 *
 * <b>Sign(message, subseed)</b>
 * <pre>
 * key = key(subseed)
 * messageHash = sponge.hash(message)
 * normalizedHash = normalizedBundle(messageHash)
 * return signatureFragment(normalizedHash, key)
 * </pre>
 *
 * <b>Verify(messageHash, signature, address)</b>
 * <pre>
 * normalizedHash = normalizedBundle(messageHash)
 * signatureDigests = digest(normalizedHash, signature)
 * return (address == address(signatureDigests))
 * </pre>
 *
 */
public class ISS {

    // One signature fragment covers 27 hash-sized chunks.
    public static final int NUMBER_OF_FRAGMENT_CHUNKS = 27;
    public static final int FRAGMENT_LENGTH = Curl.HASH_LENGTH * NUMBER_OF_FRAGMENT_CHUNKS;
    private static final int NUMBER_OF_SECURITY_LEVELS = 3;

    private static final int MIN_TRIT_VALUE = -1, MAX_TRIT_VALUE = 1;
    public static final int TRYTE_WIDTH = 3;
    // Trytes are interpreted in balanced base 27: [-13, 13].
    private static final int MIN_TRYTE_VALUE = -13, MAX_TRYTE_VALUE = 13;
    public static final int NORMALIZED_FRAGMENT_LENGTH = Curl.HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS;

    ///////////////////////////////////////////////Key generation////////////////////////////////////////////////////

    /**
     * Calculates a {@code subSeed} from a given {@code seed} and {@code index}
     * <pre>
     * subSeed = Hash(seed + index)
     * </pre>
     *
     * @param mode Hash function to be used
     * @param seed trit array of master seed
     * @param index incremental index of the subseed
     * @return the subseed
     */
    public static byte[] subseed(SpongeFactory.Mode mode, final byte[] seed, int index) {

        if (index < 0) {
            throw new RuntimeException("Invalid subseed index: " + index);
        }

        // Work on a copy so the caller's seed is not mutated.
        final byte[] subseedPreimage = Arrays.copyOf(seed, seed.length);

        // Add `index` to the seed in balanced ternary, one increment at a time
        // (carry propagates while a trit overflows past MAX_TRIT_VALUE).
        while (index-- > 0) {

            for (int i = 0; i < subseedPreimage.length; i++) {

                if (++subseedPreimage[i] > MAX_TRIT_VALUE) {
                    subseedPreimage[i] = MIN_TRIT_VALUE;
                } else {
                    break;
                }
            }
        }

        final byte[] subseed = new byte[Curl.HASH_LENGTH];

        final Sponge hash = SpongeFactory.create(mode);
        hash.absorb(subseedPreimage, 0, subseedPreimage.length);
        hash.squeeze(subseed, 0, subseed.length);
        return subseed;
    }

    /**
     * Derives a secret key of length {@code FRAGMENT_LENGTH} * {@code numberOfFragments} from subseed. <br>
     * <pre>
     * key = squeeze(absorb(subseed), length = FRAGMENT_LENGTH * numberOfFragments)
     * </pre>
     *
     * @param mode Hash function to be used
     * @param subseed secret seed
     * @param numberOfFragments desired security level, [1, 2, 3]
     * @return secret key
     */
    public static byte[] key(SpongeFactory.Mode mode, final byte[] subseed, final int numberOfFragments) {

        if (subseed.length != Curl.HASH_LENGTH) {
            throw new RuntimeException("Invalid subseed length: " + subseed.length);
        }
        if (numberOfFragments <= 0) {
            throw new RuntimeException("Invalid number of key fragments: " + numberOfFragments);
        }

        final byte[] key = new byte[FRAGMENT_LENGTH * numberOfFragments];

        final Sponge hash = SpongeFactory.create(mode);
        hash.absorb(subseed, 0, subseed.length);
        hash.squeeze(key, 0, key.length);
        return key;
    }

    /**
     * Calculates the digest of the public key <br>
     * <pre>
     * n = NUMBER_OF_FRAGMENT_CHUNKS
     * pub_keys(1, .. , n) = (Hash^27(key_1), .. , Hash^27(key_n))
     * digest = Hash(pub_keys)
     *
     * *process repeats for each security level (implicit from key.length)
     * </pre>
     * @param mode Hash function to be used
     * @param key secret key
     * @return digests of the public key
     */
    public static byte[] digests(SpongeFactory.Mode mode, final byte[] key) {

        if (key.length == 0 || key.length % FRAGMENT_LENGTH != 0) {
            throw new RuntimeException("Invalid key length: " + key.length);
        }

        final byte[] digests = new byte[key.length / FRAGMENT_LENGTH * Curl.HASH_LENGTH];
        final Sponge hash = SpongeFactory.create(mode);

        // One iteration per security level / key fragment.
        for (int i = 0; i < key.length / FRAGMENT_LENGTH; i++) {

            final byte[] buffer = Arrays.copyOfRange(key, i * FRAGMENT_LENGTH, (i + 1) * FRAGMENT_LENGTH);
            for (int j = 0; j < NUMBER_OF_FRAGMENT_CHUNKS; j++) {

                // Hash each chunk 27 times (MAX_TRYTE_VALUE - MIN_TRYTE_VALUE)
                // to obtain the public-key chunk from the private one.
                for (int k = MAX_TRYTE_VALUE - MIN_TRYTE_VALUE; k-- > 0;) {
                    hash.reset();
                    hash.absorb(buffer, j * Curl.HASH_LENGTH, Curl.HASH_LENGTH);
                    hash.squeeze(buffer, j * Curl.HASH_LENGTH, Curl.HASH_LENGTH);
                }
            }
            hash.reset();
            hash.absorb(buffer, 0, buffer.length);
            hash.squeeze(digests, i * Curl.HASH_LENGTH, Curl.HASH_LENGTH);
        }
        return digests;
    }

    /**
     * Calculates the public address <br>
     * <pre>
     * address = hash(digests)
     * </pre>
     * @param mode Hash function to be used
     * @param digests digests of the public key
     * @return public address
     */
    public static byte[] address(SpongeFactory.Mode mode, final byte[] digests) {

        if (digests.length == 0 || digests.length % Curl.HASH_LENGTH != 0) {
            throw new RuntimeException("Invalid digests length: " + digests.length);
        }

        final byte[] address = new byte[Curl.HASH_LENGTH];

        final Sponge hash = SpongeFactory.create(mode);
        hash.absorb(digests, 0, digests.length);
        hash.squeeze(address, 0, address.length);
        return address;
    }

    /////////////////////////////////////////////////Signing////////////////////////////////////////////////////////

    /**
     * Deterministically Normalize the bundle hash. <br>
     * <ol>
     * <li>map each tryte in {@code bundle} to balanced base-27 {@code [-13 , 13]} </li>
     * <li>sum all mapped trytes together</li>
     * <li>if sum != 0, start inc/dec each tryte till sum equals 0</li>
     * </ol>
     *
     * @param bundle hash to be normalized
     * @return normalized hash
     */
    public static byte[] normalizedBundle(final byte[] bundle) {

        if (bundle.length != Curl.HASH_LENGTH) {
            throw new RuntimeException("Invalid bundleValidator length: " + bundle.length);
        }

        final byte[] normalizedBundle = new byte[Curl.HASH_LENGTH / TRYTE_WIDTH];
        // The actual normalization is implemented once, in ISSInPlace.
        ISSInPlace.normalizedBundle(bundle, normalizedBundle);
        return normalizedBundle;
    }

    /**
     * Generates a signature for {@code normalizedBundleFragment} <br>
     * <pre>
     * M = normalizedBundleFragment
     * sig(1, .. , n) = (Hash^(M_1)(key_1), .. , Hash^(M_n)(key_n))
     * </pre>
     *
     * @param mode Hash function to be used
     * @param normalizedBundleFragment normalized hash of the message to be signed
     * @param keyFragment secret key
     * @return signature
     */
    public static byte[] signatureFragment(SpongeFactory.Mode mode, final byte[] normalizedBundleFragment,
            final byte[] keyFragment) {

        if (normalizedBundleFragment.length != NORMALIZED_FRAGMENT_LENGTH) {
            throw new RuntimeException(
                    "Invalid normalized bundleValidator fragment length: " + normalizedBundleFragment.length);
        }
        if (keyFragment.length != FRAGMENT_LENGTH) {
            throw new RuntimeException("Invalid key fragment length: " + keyFragment.length);
        }

        final byte[] signatureFragment = Arrays.copyOf(keyFragment, keyFragment.length);

        final Sponge hash = SpongeFactory.create(mode);

        for (int j = 0; j < NUMBER_OF_FRAGMENT_CHUNKS; j++) {

            // Hash chunk j (MAX_TRYTE_VALUE - M_j) times; the verifier will
            // complete the remaining hashes in digest() below.
            for (int k = MAX_TRYTE_VALUE - normalizedBundleFragment[j]; k-- > 0;) {
                hash.reset();
                hash.absorb(signatureFragment, j * Curl.HASH_LENGTH, Curl.HASH_LENGTH);
                hash.squeeze(signatureFragment, j * Curl.HASH_LENGTH, Curl.HASH_LENGTH);
            }
        }

        return signatureFragment;
    }

    ////////////////////////////////////////////Validation//////////////////////////////////////////////////

    /**
     * recalculate the digest of the public key used to sign {@code signatureFragment}. <br>
     * <pre>
     * n = NUMBER_OF_FRAGMENT_CHUNKS
     * M = normalizedBundleFragment
     *
     * pub_keys'(1, .. , n) = (Hash^(27-M_1)(sig_1), .. , Hash^(27-M_n)(sig_n))
     * *process repeats for each security level (implicit from key.length)
     *
     * return Hash(pub_keys')
     * </pre>
     *
     * @param mode Hash function to be used
     * @param normalizedBundleFragment normalized hash of the message signed
     * @param signatureFragment signature
     * @return digest of the public key used
     */
    public static byte[] digest(SpongeFactory.Mode mode, final byte[] normalizedBundleFragment,
            final byte[] signatureFragment) {

        if (normalizedBundleFragment.length != Curl.HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS) {
            throw new RuntimeException(
                    "Invalid normalized bundleValidator fragment length: " + normalizedBundleFragment.length);
        }
        if (signatureFragment.length != FRAGMENT_LENGTH) {
            throw new RuntimeException("Invalid signature fragment length: " + signatureFragment.length);
        }

        final byte[] digest = new byte[Curl.HASH_LENGTH];
        // Delegates to the allocation-free implementation.
        ISSInPlace.digest(mode, normalizedBundleFragment, 0, signatureFragment, 0, digest);
        return digest;
    }

    /**
     * Calculates the {@code root} of a Merkle tree, given a leaf and Merkle path<br>
     * @see <a href="https://en.wikipedia.org/wiki/Merkle_tree">https://en.wikipedia.org/wiki/Merkle_tree</a><br>
     *
     * @param mode Hash function to be used
     * @param hash leaf of Merkle tree
     * @param trits Merkle path, the siblings of the leaf concatenated.
     * @param offset starting position in {@code trits}
     * @param indexIn leaf index (used to determine order of concatenation)
     * @param depth depth of Merkle tree
     * @return the root, the consecutive hashing of the leaf and the Merkle path.
     */
    public static byte[] getMerkleRoot(SpongeFactory.Mode mode, byte[] hash, byte[] trits, int offset,
            final int indexIn, int depth) {
        int index = indexIn;
        final Sponge curl = SpongeFactory.create(mode);
        for (int i = 0; i < depth; i++) {
            curl.reset();
            // The low bit of the index decides whether the current node is the
            // left (even) or right (odd) child, i.e. the absorb order.
            if ((index & 1) == 0) {
                curl.absorb(hash, 0, hash.length);
                curl.absorb(trits, offset + i * Curl.HASH_LENGTH, Curl.HASH_LENGTH);
            } else {
                curl.absorb(trits, offset + i * Curl.HASH_LENGTH, Curl.HASH_LENGTH);
                curl.absorb(hash, 0, hash.length);
            }
            // NOTE: `hash` is overwritten in place at every level.
            curl.squeeze(hash, 0, hash.length);

            index >>= 1;
        }
        // If the index is not fully consumed, index and depth are inconsistent;
        // signal failure with the null hash.
        if (index != 0) {
            return Hash.NULL_HASH.trits();
        }
        return hash;
    }
}
11,563
35.711111
117
java
iri
iri-master/src/main/java/com/iota/iri/crypto/ISSInPlace.java
package com.iota.iri.crypto;

import java.util.Arrays;

/**
 * (c) 2016 Come-from-Beyond <br>
 *
 * IOTA Signature Scheme. <br>
 * Based on Winternitz One Time Signatures.<br>
 * Implemented in place - does not allocate memory.
 *
 * @see ISS ISS for popular usage
 */
public class ISSInPlace {

    // One signature fragment covers 27 hash-sized chunks.
    public static final int NUMBER_OF_FRAGMENT_CHUNKS = 27;
    public static final int FRAGMENT_LENGTH = Kerl.HASH_LENGTH * NUMBER_OF_FRAGMENT_CHUNKS;
    public static final int TRYTE_WIDTH = 3;
    private static final int NUMBER_OF_SECURITY_LEVELS = 3;
    public static final int NORMALIZED_FRAGMENT_LENGTH = Kerl.HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS;
    private static final byte MIN_TRIT_VALUE = -1, MAX_TRIT_VALUE = 1;
    // Trytes are interpreted in balanced base 27: [-13, 13].
    private static final byte MIN_TRYTE_VALUE = -13, MAX_TRYTE_VALUE = 13;

    /**
     * Derives a subseed in place: on input {@code subseed} holds the master seed,
     * on output it holds {@code Hash(seed + index)}.
     *
     * @param mode hash function to be used
     * @param subseed in/out buffer of length {@code Kerl.HASH_LENGTH}
     * @param index incremental index of the subseed, must be non-negative
     * @throws RuntimeException if {@code index} is negative
     * @throws IllegalArgumentException if the buffer length is wrong
     */
    public static void subseed(SpongeFactory.Mode mode, byte[] subseed, int index) {

        if (index < 0) {
            throw new RuntimeException("Invalid subseed index: " + index);
        }

        if (subseed.length != Kerl.HASH_LENGTH) {
            throw new IllegalArgumentException("Subseed array is not of HASH_LENGTH");
        }

        // Add `index` to the seed in balanced ternary, one increment at a time
        // (carry propagates while a trit overflows past MAX_TRIT_VALUE).
        while (index-- > 0) {

            for (int i = 0; i < subseed.length; i++) {

                if (++subseed[i] > MAX_TRIT_VALUE) {
                    subseed[i] = MIN_TRIT_VALUE;
                } else {
                    break;
                }
            }
        }

        final Sponge hash = SpongeFactory.create(mode);
        hash.absorb(subseed, 0, subseed.length);
        hash.squeeze(subseed, 0, subseed.length);
    }

    /**
     * Derives the secret key from {@code subseed} into the caller-provided buffer.
     * The number of fragments (the security level) is implied by {@code key.length}.
     *
     * @param mode hash function to be used
     * @param subseed secret seed, {@code Kerl.HASH_LENGTH} trits
     * @param key output buffer; length must be a positive multiple of {@code FRAGMENT_LENGTH}
     */
    public static void key(SpongeFactory.Mode mode, final byte[] subseed, byte[] key) {

        if (subseed.length != Kerl.HASH_LENGTH) {
            throw new RuntimeException("Invalid subseed length: " + subseed.length);
        }

        if ((key.length % FRAGMENT_LENGTH) != 0) {
            throw new IllegalArgumentException("key length must be multiple of fragment length");
        }

        int numberOfFragments = key.length / FRAGMENT_LENGTH;

        if (numberOfFragments <= 0) {
            throw new RuntimeException("Invalid number of key fragments: " + numberOfFragments);
        }

        final Sponge hash = SpongeFactory.create(mode);
        hash.absorb(subseed, 0, subseed.length);
        hash.squeeze(key, 0, key.length);
    }

    /**
     * Calculates the public-key digests of {@code key} into the caller-provided buffer:
     * each hash-sized chunk is hashed 27 times, then every fragment's chunks are
     * hashed together into one digest.
     *
     * @param mode hash function to be used
     * @param key secret key; length must be a positive multiple of {@code FRAGMENT_LENGTH}
     * @param digests output buffer, one {@code Kerl.HASH_LENGTH} digest per key fragment
     */
    public static void digests(SpongeFactory.Mode mode, final byte[] key, byte[] digests) {

        if (key.length == 0 || key.length % FRAGMENT_LENGTH != 0) {
            throw new RuntimeException("Invalid key length: " + key.length);
        }

        if (digests.length != (key.length / FRAGMENT_LENGTH * Kerl.HASH_LENGTH)) {
            throw new IllegalArgumentException("Invalid digests length");
        }

        final Sponge hash = SpongeFactory.create(mode);

        // One iteration per security level / key fragment.
        for (int i = 0; i < key.length / FRAGMENT_LENGTH; i++) {

            final byte[] buffer = Arrays.copyOfRange(key, i * FRAGMENT_LENGTH, (i + 1) * FRAGMENT_LENGTH);
            for (int j = 0; j < NUMBER_OF_FRAGMENT_CHUNKS; j++) {

                // Hash each chunk 27 times (MAX_TRYTE_VALUE - MIN_TRYTE_VALUE).
                for (int k = MAX_TRYTE_VALUE - MIN_TRYTE_VALUE; k-- > 0;) {
                    hash.reset();
                    hash.absorb(buffer, j * Kerl.HASH_LENGTH, Kerl.HASH_LENGTH);
                    hash.squeeze(buffer, j * Kerl.HASH_LENGTH, Kerl.HASH_LENGTH);
                }
            }
            hash.reset();
            hash.absorb(buffer, 0, buffer.length);
            hash.squeeze(digests, i * Kerl.HASH_LENGTH, Kerl.HASH_LENGTH);
        }
    }

    /**
     * Calculates the public address from the key digests into the caller-provided buffer:
     * {@code address = hash(digests)}.
     *
     * @param mode hash function to be used
     * @param digests digests of the public key; length must be a positive multiple of {@code Kerl.HASH_LENGTH}
     * @param address output buffer of {@code Kerl.HASH_LENGTH} trits
     */
    public static void address(SpongeFactory.Mode mode, final byte[] digests, byte[] address) {

        if (digests.length == 0 || digests.length % Kerl.HASH_LENGTH != 0) {
            throw new RuntimeException("Invalid digests length: " + digests.length);
        }

        if (address.length != Kerl.HASH_LENGTH) {
            throw new IllegalArgumentException("Invalid address length");
        }

        final Sponge hash = SpongeFactory.create(mode);
        hash.absorb(digests, 0, digests.length);
        hash.squeeze(address, 0, address.length);
    }

    /**
     * Recomputes the public-key digest from a signature fragment and the normalized
     * bundle fragment: each signature chunk j is hashed {@code M_j - MIN_TRYTE_VALUE}
     * times (completing the signer's hashes to 27), then all chunks are hashed into
     * {@code digest}.
     *
     * @param mode hash function to be used
     * @param normalizedBundleFragment normalized hash of the signed message
     * @param nbOff offset into {@code normalizedBundleFragment}
     * @param signatureFragment the signature to verify
     * @param sfOff offset into {@code signatureFragment}
     * @param digest output buffer of {@code Curl.HASH_LENGTH} trits
     */
    public static void digest(SpongeFactory.Mode mode, final byte[] normalizedBundleFragment, int nbOff,
            final byte[] signatureFragment, int sfOff, byte[] digest) {

        if (normalizedBundleFragment.length - nbOff < (Curl.HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS)) {
            throw new RuntimeException(
                    "Invalid normalized bundleValidator fragment length: " + normalizedBundleFragment.length);
        }
        if (signatureFragment.length - sfOff < FRAGMENT_LENGTH) {
            throw new RuntimeException("Invalid signature fragment length: " + signatureFragment.length);
        }
        if (digest.length != Curl.HASH_LENGTH) {
            throw new IllegalArgumentException("Invalid digest array length.");
        }

        // Work on a copy so the caller's signature is not mutated.
        final byte[] buffer = Arrays.copyOfRange(signatureFragment, sfOff, sfOff + FRAGMENT_LENGTH);

        final Sponge hash = SpongeFactory.create(mode);
        for (int j = 0; j < NUMBER_OF_FRAGMENT_CHUNKS; j++) {

            for (int k = normalizedBundleFragment[nbOff + j] - MIN_TRYTE_VALUE; k-- > 0;) {
                hash.reset();
                hash.absorb(buffer, j * Curl.HASH_LENGTH, Curl.HASH_LENGTH);
                hash.squeeze(buffer, j * Curl.HASH_LENGTH, Curl.HASH_LENGTH);
            }
        }
        hash.reset();
        hash.absorb(buffer, 0, buffer.length);
        hash.squeeze(digest, 0, digest.length);
    }

    /**
     * Deterministically normalizes a bundle hash into {@code normalizedBundle}:
     * maps each tryte to balanced base 27 ({@code [-13, 13]}), then, per security
     * level, increments/decrements trytes until the level's sum is zero.
     *
     * @param bundle bundle hash of {@code Curl.HASH_LENGTH} trits
     * @param normalizedBundle output buffer of {@code Curl.HASH_LENGTH / TRYTE_WIDTH} trytes
     */
    public static void normalizedBundle(final byte[] bundle, byte[] normalizedBundle) {

        if (bundle.length != Curl.HASH_LENGTH) {
            throw new RuntimeException("Invalid bundleValidator length: " + bundle.length);
        }

        // Normalize each of the three security-level segments independently.
        for (int i = 0; i < NUMBER_OF_SECURITY_LEVELS; i++) {

            int sum = 0;
            for (int j = i * (Curl.HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS); j < (i + 1)
                    * (Curl.HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS); j++) {

                // Pack three trits into one balanced-base-27 tryte.
                normalizedBundle[j] = (byte) (bundle[j * TRYTE_WIDTH] + bundle[j * TRYTE_WIDTH + 1] * 3
                        + bundle[j * TRYTE_WIDTH + 2] * 9);
                sum += normalizedBundle[j];
            }
            if (sum > 0) {

                // Decrement trytes (skipping those already at the minimum)
                // until the segment sums to zero.
                while (sum-- > 0) {

                    for (int j = i * (Curl.HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS); j < (i + 1)
                            * (Curl.HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS); j++) {

                        if (normalizedBundle[j] > MIN_TRYTE_VALUE) {
                            normalizedBundle[j]--;
                            break;
                        }
                    }
                }
            } else {

                // Symmetric case: increment trytes until the segment sums to zero.
                while (sum++ < 0) {

                    for (int j = i * (Curl.HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS); j < (i + 1)
                            * (Curl.HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS); j++) {

                        if (normalizedBundle[j] < MAX_TRYTE_VALUE) {
                            normalizedBundle[j]++;
                            break;
                        }
                    }
                }
            }
        }
    }
}
37.87766
117
java
iri
iri-master/src/main/java/com/iota/iri/crypto/Kerl.java
package com.iota.iri.crypto; import com.google.common.annotations.VisibleForTesting; import com.iota.iri.utils.Converter; import org.bouncycastle.jcajce.provider.digest.Keccak; import java.math.BigInteger; import java.security.DigestException; import java.util.Arrays; import java.util.stream.IntStream; /** * Kerl is a cryptographic hash function, from the sponge family. <br> * Kerl is a ternary wrapper to {@code Keccak-384}. * * @see <a href="https://github.com/iotaledger/kerl/blob/master/IOTA-Kerl-spec.md">Kerl spec</a> */ public final class Kerl implements Sponge { private static final int BIT_HASH_LENGTH = 384; @VisibleForTesting static final int BYTE_HASH_LENGTH = BIT_HASH_LENGTH / 8; private static final BigInteger RADIX = BigInteger.valueOf(Converter.RADIX); @VisibleForTesting static final int MAX_POWERS_LONG = 40; private static final BigInteger[] RADIX_POWERS = IntStream.range(0, MAX_POWERS_LONG + 1).mapToObj(RADIX::pow).toArray(BigInteger[]::new); //delegate private final Keccak.Digest384 keccak; protected Kerl() { this.keccak = new Keccak.Digest384(); } @Override public void reset() { this.keccak.reset(); } @Override public void absorb(final byte[] trits, final int offset, final int length) { if (length % 243 != 0) { throw new RuntimeException("Illegal length: " + length); } for (int pos = offset; pos < offset + length; pos += HASH_LENGTH) { //convert to bytes && update byte[] state = new byte[BYTE_HASH_LENGTH]; trits[pos + HASH_LENGTH - 1] = 0; bytesFromBigInt(bigIntFromTrits(trits, pos, HASH_LENGTH), state); keccak.update(state); } } /** * {@inheritDoc} * * <p> * <b>Due to a known vulnerability in this implementation, no more than 243 trits should be squeezed.</b> * </p> */ @Override public void squeeze(final byte[] trits, final int offset, final int length) { if (length % 243 != 0) { throw new IllegalArgumentException("Illegal length: " + length); } try { for (int pos = offset; pos < offset + length; pos += HASH_LENGTH) { byte[] state = new 
byte[BYTE_HASH_LENGTH]; keccak.digest(state, 0, BYTE_HASH_LENGTH); //convert into trits BigInteger value = new BigInteger(state); tritsFromBigInt(value, trits, pos, Sponge.HASH_LENGTH); trits[pos + HASH_LENGTH - 1] = 0; //calculate hash again for (int i = state.length; i-- > 0; ) { state[i] = (byte) (state[i] ^ 0xFF); } keccak.update(state); } } catch (DigestException e) { e.printStackTrace(System.err); throw new RuntimeException(e); } } //Bytes<->Trits Converters, used to convert 384bit to 243trits static BigInteger bigIntFromTrits(final byte[] trits, final int offset, final int size) { for (int i = offset; i < offset + size; i++) { if (trits[i] < -1 || trits[i] > 1) { throw new IllegalArgumentException("not a trit: " + trits[i]); } } BigInteger value = BigInteger.ZERO; for (int n = offset + size - 1; n >= offset; ) { int count = 0; long num = 0L; while (n >= offset && count < MAX_POWERS_LONG) { num = 3 * num + trits[n--]; count++; } value = value.multiply(RADIX_POWERS[count]).add(BigInteger.valueOf(num)); } return value; } static void tritsFromBigInt(final BigInteger value, final byte[] destination, final int offset, final int size) { if (destination.length - offset < size) { throw new IllegalArgumentException("Destination array has invalid size"); } final int signum = value.signum(); if (signum == 0) { Arrays.fill(destination, offset, size, (byte) 0); return; } BigInteger absoluteValue = value.abs(); for (int i = 0; i < size; i++) { BigInteger[] divRemainder = absoluteValue.divideAndRemainder(RADIX); absoluteValue = divRemainder[0]; byte remainder = divRemainder[1].byteValue(); if (remainder > Converter.MAX_TRIT_VALUE) { remainder = Converter.MIN_TRIT_VALUE; absoluteValue = absoluteValue.add(BigInteger.ONE); } destination[offset + i] = signum < 0 ? 
(byte) -remainder : remainder; } } static void bytesFromBigInt(final BigInteger value, final byte[] destination) { if (destination.length < BYTE_HASH_LENGTH) { throw new IllegalArgumentException("Destination array has invalid size."); } byte[] bytes = value.toByteArray(); int start = BYTE_HASH_LENGTH - bytes.length; Arrays.fill(destination, 0, start, (byte) (value.signum() < 0 ? -1 : 0)); for (int i = 0; i < bytes.length; i++) { destination[start++] = bytes[i]; } } }
5,287
34.253333
141
java
iri
iri-master/src/main/java/com/iota/iri/crypto/PearlDiver.java
package com.iota.iri.crypto; import java.util.ArrayList; import java.util.List; /** * Proof of Work calculator. * <p> * Given a transaction's trits, computes an additional nonce such that * the hash ends with {@code minWeightMagnitude} zeros:<br> * </p> * <pre> * trailingZeros(hash(transaction || nonce)) < minWeightMagnitude * </pre> */ public class PearlDiver { enum State { RUNNING, CANCELLED, COMPLETED } private static final int TRANSACTION_LENGTH = 8019; private static final int CURL_HASH_LENGTH = 243; private static final int CURL_STATE_LENGTH = CURL_HASH_LENGTH * 3; private static final long HIGH_BITS = 0b11111111_11111111_11111111_11111111_11111111_11111111_11111111_11111111L; private static final long LOW_BITS = 0b00000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000L; private volatile State state; private final Object syncObj = new Object(); /** * Searches for a nonce such that the hash ends with {@code minWeightMagnitude} zeros.<br> * To add the {@value com.iota.iri.controllers.TransactionViewModel#NONCE_TRINARY_SIZE} * trits long nounce {@code transactionTrits} are changed from the following offset: * {@value com.iota.iri.controllers.TransactionViewModel#NONCE_TRINARY_OFFSET} <br> * * @param transactionTrits trits of transaction * @param minWeightMagnitude target weight for trailing zeros * @param numberOfThreads number of worker threads to search for a nonce * @return <tt>true</tt> if search completed successfully. 
* the nonce will be written to the end of {@code transactionTrits} */ public synchronized boolean search(final byte[] transactionTrits, final int minWeightMagnitude, int numberOfThreads) { validateParameters(transactionTrits, minWeightMagnitude); synchronized (syncObj) { state = State.RUNNING; } final long[] midStateLow = new long[CURL_STATE_LENGTH]; final long[] midStateHigh = new long[CURL_STATE_LENGTH]; initializeMidCurlStates(transactionTrits, midStateLow, midStateHigh); if (numberOfThreads <= 0) { int available = Runtime.getRuntime().availableProcessors(); numberOfThreads = Math.max(1, Math.floorDiv(available * 8, 10)); } List<Thread> workers = new ArrayList<>(numberOfThreads); while (numberOfThreads-- > 0) { long[] midStateCopyLow = midStateLow.clone(); long[] midStateCopyHigh = midStateHigh.clone(); Runnable runnable = getRunnable(numberOfThreads, transactionTrits, minWeightMagnitude, midStateCopyLow, midStateCopyHigh); Thread worker = new Thread(runnable); workers.add(worker); worker.setName(this + ":worker-" + numberOfThreads); worker.setDaemon(true); worker.start(); } for (Thread worker : workers) { try { worker.join(); } catch (InterruptedException e) { synchronized (syncObj) { state = State.CANCELLED; } } } return state == State.COMPLETED; } /** * Cancels the running search task. 
*/ public void cancel() { synchronized (syncObj) { state = State.CANCELLED; } } private static void validateParameters(byte[] transactionTrits, int minWeightMagnitude) { if (transactionTrits.length != TRANSACTION_LENGTH) { throw new RuntimeException( "Invalid transaction trits length: " + transactionTrits.length); } if (minWeightMagnitude < 0 || minWeightMagnitude > CURL_HASH_LENGTH) { throw new RuntimeException("Invalid min weight magnitude: " + minWeightMagnitude); } } private Runnable getRunnable(final int threadIndex, final byte[] transactionTrits, final int minWeightMagnitude, final long[] midStateCopyLow, final long[] midStateCopyHigh) { return () -> { for (int i = 0; i < threadIndex; i++) { increment(midStateCopyLow, midStateCopyHigh, 162 + CURL_HASH_LENGTH / 9, 162 + (CURL_HASH_LENGTH / 9) * 2); } final long[] stateLow = new long[CURL_STATE_LENGTH]; final long[] stateHigh = new long[CURL_STATE_LENGTH]; final long[] scratchpadLow = new long[CURL_STATE_LENGTH]; final long[] scratchpadHigh = new long[CURL_STATE_LENGTH]; final int maskStartIndex = CURL_HASH_LENGTH - minWeightMagnitude; long mask = 0; while (state == State.RUNNING && mask == 0) { increment(midStateCopyLow, midStateCopyHigh, 162 + (CURL_HASH_LENGTH / 9) * 2, CURL_HASH_LENGTH); copy(midStateCopyLow, midStateCopyHigh, stateLow, stateHigh); transform(stateLow, stateHigh, scratchpadLow, scratchpadHigh); mask = HIGH_BITS; for (int i = maskStartIndex; i < CURL_HASH_LENGTH && mask != 0; i++) { mask &= ~(stateLow[i] ^ stateHigh[i]); } } if (mask != 0) { synchronized (syncObj) { if (state == State.RUNNING) { state = State.COMPLETED; long outMask = 1; while ((outMask & mask) == 0) { outMask <<= 1; } for (int i = 0; i < CURL_HASH_LENGTH; i++) { transactionTrits[TRANSACTION_LENGTH - CURL_HASH_LENGTH + i] = (midStateCopyLow[i] & outMask) == 0 ? 1 : (midStateCopyHigh[i] & outMask) == 0 ? 
(byte) -1 : (byte) 0; } } } } }; } private static void copy(long[] srcLow, long[] srcHigh, long[] destLow, long[] destHigh) { System.arraycopy(srcLow, 0, destLow, 0, CURL_STATE_LENGTH); System.arraycopy(srcHigh, 0, destHigh, 0, CURL_STATE_LENGTH); } private static void initializeMidCurlStates(byte[] transactionTrits, long[] midStateLow, long[] midStateHigh) { for (int i = CURL_HASH_LENGTH; i < CURL_STATE_LENGTH; i++) { midStateLow[i] = HIGH_BITS; midStateHigh[i] = HIGH_BITS; } int offset = 0; final long[] curlScratchpadLow = new long[CURL_STATE_LENGTH]; final long[] curlScratchpadHigh = new long[CURL_STATE_LENGTH]; for (int i = (TRANSACTION_LENGTH - CURL_HASH_LENGTH) / CURL_HASH_LENGTH; i-- > 0; ) { for (int j = 0; j < CURL_HASH_LENGTH; j++) { switch (transactionTrits[offset++]) { case 0: midStateLow[j] = HIGH_BITS; midStateHigh[j] = HIGH_BITS; break; case 1: midStateLow[j] = LOW_BITS; midStateHigh[j] = HIGH_BITS; break; default: midStateLow[j] = HIGH_BITS; midStateHigh[j] = LOW_BITS; } } transform(midStateLow, midStateHigh, curlScratchpadLow, curlScratchpadHigh); } for (int i = 0; i < 162; i++) { switch (transactionTrits[offset++]) { case 0: midStateLow[i] = HIGH_BITS; midStateHigh[i] = HIGH_BITS; break; case 1: midStateLow[i] = LOW_BITS; midStateHigh[i] = HIGH_BITS; break; default: midStateLow[i] = HIGH_BITS; midStateHigh[i] = LOW_BITS; } } midStateLow[162 + 0] = 0b1101101101101101101101101101101101101101101101101101101101101101L; midStateHigh[162 + 0] = 0b1011011011011011011011011011011011011011011011011011011011011011L; midStateLow[162 + 1] = 0b1111000111111000111111000111111000111111000111111000111111000111L; midStateHigh[162 + 1] = 0b1000111111000111111000111111000111111000111111000111111000111111L; midStateLow[162 + 2] = 0b0111111111111111111000000000111111111111111111000000000111111111L; midStateHigh[162 + 2] = 0b1111111111000000000111111111111111111000000000111111111111111111L; midStateLow[162 + 3] = 
0b1111111111000000000000000000000000000111111111111111111111111111L; midStateHigh[162 + 3] = 0b0000000000111111111111111111111111111111111111111111111111111111L; } private static void transform(final long[] stateLow, final long[] stateHigh, final long[] scratchpadLow, final long[] scratchpadHigh) { for (int round = 0; round < Curl.NUMBER_OF_ROUNDSP81; round++) { copy(stateLow, stateHigh, scratchpadLow, scratchpadHigh); int scratchpadIndex = 0; for (int stateIndex = 0; stateIndex < CURL_STATE_LENGTH; stateIndex++) { final long alpha = scratchpadLow[scratchpadIndex]; final long beta = scratchpadHigh[scratchpadIndex]; if (scratchpadIndex < 365) { scratchpadIndex += 364; } else { scratchpadIndex += -365; } final long delta = alpha & (scratchpadLow[scratchpadIndex] ^ beta); stateLow[stateIndex] = ~delta; stateHigh[stateIndex] = (alpha ^ scratchpadHigh[scratchpadIndex]) | delta; } } } private static void increment(final long[] midStateCopyLow, final long[] midStateCopyHigh, final int fromIndex, final int toIndex) { for (int i = fromIndex; i < toIndex; i++) { if (midStateCopyLow[i] == LOW_BITS) { midStateCopyLow[i] = HIGH_BITS; midStateCopyHigh[i] = LOW_BITS; } else if (midStateCopyHigh[i] == LOW_BITS) { midStateCopyHigh[i] = HIGH_BITS; break; } else { midStateCopyLow[i] = LOW_BITS; break; } } } }
10,560
40.90873
134
java
iri
iri-master/src/main/java/com/iota/iri/crypto/Sponge.java
package com.iota.iri.crypto; /** * Hash functions from the Sponge family. * @see <a href="https://en.wikipedia.org/wiki/Sponge_function">https://en.wikipedia.org/wiki/Sponge_function</a><br> * * Popular usage: Hash(x) * <pre> * new sponge <br> * sponge.absorb(x) <br> * return sponge.squeeze() <br> * </pre> * */ public interface Sponge { int HASH_LENGTH = 243; /** * Absorbs {@code trits}, in chunks of {@value #HASH_LENGTH}.<br> * can be called consecutively to absorb more trits. * * @param trits trits array to be absorbed by the sponge * @param offset starting position in trits array * @param length amount of trits to absorb, multiple of {@value #HASH_LENGTH} */ void absorb(final byte[] trits, int offset, int length); /** * Squeezes {@code length} trits from the sponge, in chunks of {@value #HASH_LENGTH}.<br> * can be called consecutively to squeeze more trits.<br> * this method will override the content of {@code trits} * * @param trits trits array to write squeezed trits to * @param offset starting position to write to in trits array * @param length amount of trits to squeeze, multiple of {@value #HASH_LENGTH} */ void squeeze(final byte[] trits, int offset, int length); /** * Resets the internal state of the sponge.<br> * Can be used to re-use a Sponge object. */ void reset(); }
1,434
30.888889
117
java
iri
iri-master/src/main/java/com/iota/iri/crypto/SpongeFactory.java
package com.iota.iri.crypto; /** * Creates sponge objects, based on required {@code Mode} * * @see Mode */ public abstract class SpongeFactory { /** * Modes of sponge constructions. * Determines which hash function we will use */ public enum Mode { CURLP81, CURLP27, KERL, //BCURLT } /** * Creates a new sponge object, based on required {@code Mode} * @param mode name of the hash function to use. * @return a newly initialized sponge */ public static Sponge create(Mode mode){ switch (mode) { case CURLP81: return new Curl(mode); case CURLP27: return new Curl(mode); case KERL: return new Kerl(); //case BCURLT: return new Curl(true, mode); default: return null; } } }
843
23.114286
66
java
iri
iri-master/src/main/java/com/iota/iri/crypto/batched/BCTCurl.java
package com.iota.iri.crypto.batched; /** * A Curl implementation which absorbs binary-encoded-ternary inputs. */ public class BCTCurl { private static final long HIGH_LONG_BITS = 0xFFFF_FFFF_FFFF_FFFFL; private int hashLength; private int numberOfRounds; private int stateLength; private BCTrinary state; /** * Creates a new {@link BCTCurl} with the given hash length and number of rounds. * @param hashLength the desired hash length * @param numberOfRounds the number of hashing rounds to apply */ public BCTCurl(int hashLength, int numberOfRounds) { this.hashLength = hashLength; this.numberOfRounds = numberOfRounds; this.stateLength = 3 * hashLength; this.state = new BCTrinary(new long[3 * hashLength], new long[3 * hashLength]); reset(); } /** * Resets the state of the hashing function. */ public void reset() { for (int i = 0; i < stateLength; i++) { state.low[i] = HIGH_LONG_BITS; state.high[i] = HIGH_LONG_BITS; } } /** * Transforms the state of the hashing function. */ public void transform() { long[] scratchPadLow = new long[stateLength]; long[] scratchPadHigh = new long[stateLength]; int scratchPadIndex = 0; for (int round = numberOfRounds; round > 0; round--) { System.arraycopy(state.low, 0, scratchPadLow, 0, state.low.length); System.arraycopy(state.high, 0, scratchPadHigh, 0, state.high.length); for (int stateIndex = 0; stateIndex < stateLength; stateIndex++) { long alpha = scratchPadLow[scratchPadIndex]; long beta = scratchPadHigh[scratchPadIndex]; if (scratchPadIndex < 365) { scratchPadIndex += 364; } else { scratchPadIndex -= 365; } long delta = beta ^ scratchPadLow[scratchPadIndex]; state.low[stateIndex] = ~(delta & alpha); state.high[stateIndex] = (alpha ^ scratchPadHigh[scratchPadIndex]) | delta; } } } /** * Absorbs the given binary-encoded-ternary trits data. 
* @param bcTrits the binary-encoded-ternary trits to absorb */ public void absorb(BCTrinary bcTrits) { int length = bcTrits.low.length; int offset = 0; for (; ; ) { int lengthToCopy; if (length < hashLength) { lengthToCopy = length; } else { lengthToCopy = hashLength; } System.arraycopy(bcTrits.low, offset, state.low, 0, lengthToCopy); System.arraycopy(bcTrits.high, offset, state.high, 0, lengthToCopy); transform(); offset += lengthToCopy; length -= lengthToCopy; if (length <= 0) { break; } } } /** * Squeezes the desired length from the state of the hashing function. * @param tritCount the desired length of the result * @return the binary-encoded-trinary data */ public BCTrinary squeeze(int tritCount) { BCTrinary result = new BCTrinary(new long[tritCount], new long[tritCount]); int hashCount = tritCount / hashLength; for (int i = 0; i < hashCount; i++) { System.arraycopy(state.low, 0, result.low, i * hashLength, hashLength); System.arraycopy(state.high, 0, result.high, i * hashLength, hashLength); transform(); } int last = tritCount - hashCount * hashLength; System.arraycopy(state.low, 0, result.low, tritCount - last, last); System.arraycopy(state.high, 0, result.high, tritCount - last, last); if (tritCount % hashLength != 0) { transform(); } return result; } }
3,934
31.254098
91
java
iri
iri-master/src/main/java/com/iota/iri/crypto/batched/BCTernaryDemultiplexer.java
package com.iota.iri.crypto.batched; /** * Demultiplexes long values into byte arrays. */ public class BCTernaryDemultiplexer { private BCTrinary bcTrinary; /** * Creates a new {@link BCTernaryDemultiplexer} with the given * binary-encoded-ternary data to demultiplex. * @param bcTrinary the binary-encoded-trinary objet to demultiplex */ public BCTernaryDemultiplexer(BCTrinary bcTrinary) { this.bcTrinary = bcTrinary; } /** * Constructs the demultiplexed version of a given column index. * @param index the column index to demultiplex * @return the byte array at the given column index */ public byte[] get(int index) { int length = bcTrinary.low.length; byte[] result = new byte[length]; for (int i = 0; i < length; i++) { long low = (bcTrinary.low[i] >> index) & 1; long high = (bcTrinary.high[i] >> index) & 1; if (low == 1 && high == 0) { result[i] = -1; continue; } if (low == 0 && high == 1) { result[i] = 1; continue; } result[i] = 0; } return result; } }
1,239
25.382979
71
java
iri
iri-master/src/main/java/com/iota/iri/crypto/batched/BCTernaryMultiplexer.java
package com.iota.iri.crypto.batched; import java.util.List; /** * Multiplexes input trits data to a {@link BCTrinary}. */ public class BCTernaryMultiplexer { private List<byte[]> inputs; /** * Creates a new {@link BCTernaryMultiplexer} which multiplexes * the given trits data. * @param inputs the input trits data to multiplex */ public BCTernaryMultiplexer(List<byte[]> inputs) { this.inputs = inputs; } /** * Multiplexes the input data into a binary-encoded ternary format. * * @return the extracted data in binary-encoded-ternary format */ public BCTrinary extract() { final int trinariesCount = inputs.size(); final int tritsCount = inputs.get(0).length; BCTrinary result = new BCTrinary(new long[tritsCount], new long[tritsCount]); for (int i = 0; i < tritsCount; i++) { BCTrit bcTrit = new BCTrit(); for (int j = 0; j < trinariesCount; j++) { switch (inputs.get(j)[i]) { case -1: bcTrit.low |= 1L << j; break; case 1: bcTrit.high |= 1L << j; break; case 0: bcTrit.low |= 1L << j; bcTrit.high |= 1L << j; break; default: // do nothing } } result.low[i] = bcTrit.low; result.high[i] = bcTrit.high; } return result; } }
1,623
27
85
java
iri
iri-master/src/main/java/com/iota/iri/crypto/batched/BCTrinary.java
package com.iota.iri.crypto.batched; /** * Represents multiplexed binary-encoded-ternary values. */ public class BCTrinary { public long[] low; public long[] high; /** * Creates a new {@link BCTrinary} with the given low/high bit long values. * @param low the low bit values * @param high the high bit values */ public BCTrinary(long[] low, long[] high) { this.low = low; this.high = high; } }
456
19.772727
79
java
iri
iri-master/src/main/java/com/iota/iri/crypto/batched/BCTrit.java
package com.iota.iri.crypto.batched; /** * Represents a single row of multiplexed binary-encoded-ternary values. * Following formula applies: trit value -1 => high 0, low 1, * trit value 0 => high 1, low 1, trit value 1 => high 1, low 0 */ public class BCTrit { public long low; public long high; }
314
21.5
72
java
iri
iri-master/src/main/java/com/iota/iri/crypto/batched/BatchedBCTCurl.java
package com.iota.iri.crypto.batched; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; /** * BatchedBCTCurl takes care of batching up hashing requests and starts processing them through a BCTCurl once either * all available slots are filled or no request is submitted within a given timeout. */ public class BatchedBCTCurl implements BatchedHasher { private static final Logger log = LoggerFactory.getLogger(BatchedBCTCurl.class); // we have max 64 bits/slots available for requests to fill up private final static int MAX_BATCH_SIZE = 64; private ArrayBlockingQueue<HashRequest> reqQueue; private int hashLength; private int numberOfRounds; private int batchTimeoutMilliSec; /** * Creates a new {@link BatchedBCTCurl} with the given hash length, number of rounds and default batch timeout. * * @param hashLength the desired hash length * @param numberOfRounds the number of hashing rounds to apply */ public BatchedBCTCurl(int hashLength, int numberOfRounds) { this.hashLength = hashLength; this.numberOfRounds = numberOfRounds; this.batchTimeoutMilliSec = BatchedHasher.DEFAULT_BATCH_TIMEOUT_MILLISECONDS; this.reqQueue = new ArrayBlockingQueue<>(MAX_BATCH_SIZE * 2); } /** * Creates a new {@link BatchedBCTCurl} with the given hash length, number of rounds and batch timeout. 
* * @param hashLength the desired hash length * @param numberOfRounds the number of hashing rounds to apply * @param timeoutMilliseconds the timeout to wait for new incoming hashing requests before starting the process */ public BatchedBCTCurl(int hashLength, int numberOfRounds, int timeoutMilliseconds) { this.hashLength = hashLength; this.numberOfRounds = numberOfRounds; this.batchTimeoutMilliSec = timeoutMilliseconds; this.reqQueue = new ArrayBlockingQueue<>(MAX_BATCH_SIZE * 2); } @Override public void submitHashingRequest(HashRequest req) { try { reqQueue.put(req); } catch (InterruptedException e) { e.printStackTrace(); } } @Override public void run() { List<HashRequest> reqs = new ArrayList<>(); long last = System.currentTimeMillis(); long processed = 0, cycles = 0, cyclesTimeSum = 0; while (!Thread.currentThread().isInterrupted()) { try { long start = System.currentTimeMillis(); // await the first request reqs.add(reqQueue.take()); // batch up requests until we hit the timeout once while (true) { HashRequest newReq = reqQueue.poll(batchTimeoutMilliSec, TimeUnit.MILLISECONDS); // didn't get any request within the timeout, lets thereby // start processing batched up requests. if (newReq == null) { break; } reqs.add(newReq); if (reqs.size() == MAX_BATCH_SIZE) { break; } } processed += reqs.size(); process(reqs); reqs.clear(); // remember some stats long now = System.currentTimeMillis(); cycles++; cyclesTimeSum += now - start; // print some stats every now and then if (now - last >= 20000L) { long maxReqsPossibleToBeProcessed = cycles * MAX_BATCH_SIZE; double ratio = Math.floor(((double) processed / (double) maxReqsPossibleToBeProcessed) * 100); double avgCycleTime = cyclesTimeSum / cycles; log.info( "batching saturation ratio {}% (processed {} / max possible {}), cycles {}, avg. 
cycle time {}ms", ratio, processed, maxReqsPossibleToBeProcessed, cycles, avgCycleTime); last = now; processed = 0; cycles = 0; cyclesTimeSum = 0; } } catch (InterruptedException e) { log.info("shutdown signal received"); Thread.currentThread().interrupt(); } } log.info("BatchedBCTCurl shutdown"); } /** * Processes the list of the given requests and executes the callbacks provided with each request after completion. * * @param reqs The requests to process. */ private void process(List<HashRequest> reqs) { // multiplex input data ArrayList<byte[]> inputs = reqs.stream().map(HashRequest::getInput) .collect(Collectors.toCollection(ArrayList::new)); BCTernaryMultiplexer multiplexer = new BCTernaryMultiplexer(inputs); BCTrinary multiplexedData = multiplexer.extract(); // hash BCTCurl bctCurl = new BCTCurl(hashLength, numberOfRounds); bctCurl.reset(); bctCurl.absorb(multiplexedData); // demultiplex and fire callbacks BCTrinary result = bctCurl.squeeze(hashLength); BCTernaryDemultiplexer demultiplexer = new BCTernaryDemultiplexer(result); for (int i = 0; i < reqs.size(); i++) { reqs.get(i).getCallback().process(demultiplexer.get(i)); } } }
5,662
38.601399
126
java
iri
iri-master/src/main/java/com/iota/iri/crypto/batched/BatchedHasher.java
package com.iota.iri.crypto.batched; /** * A BatchedHasher is a hasher which collects inputs in order * to perform optimized hashing by hashing multiple inputs at once. */ public interface BatchedHasher extends Runnable { /** * Default max timeout in milliseconds {@link BatchedHasher}s * await for a new incoming request before starting the batched hashing process. */ int DEFAULT_BATCH_TIMEOUT_MILLISECONDS = 50; /** * Submits the given request to the {@link BatchedHasher} for processing. * The request's callback is executed within the thread of the BatchedHasher * up on completion of the processing. * * @param req The hashing request. */ void submitHashingRequest(HashRequest req); }
758
30.625
84
java
iri
iri-master/src/main/java/com/iota/iri/crypto/batched/BatchedHasherFactory.java
package com.iota.iri.crypto.batched; import com.iota.iri.crypto.Curl; import com.iota.iri.crypto.SpongeFactory; /** * Creates {@link BatchedHasher} objects based on the required type. */ public class BatchedHasherFactory { /** * The specific implementations of a {@link BatchedHasher}. */ public enum Type { BCTCURL81, BCTCURL27, FakeBatchedCURL81, FakeBatchedCURL27, } /** * Creates a new {@link BatchedHasher} instances with a default * batch timeout of {@link BatchedHasher#DEFAULT_BATCH_TIMEOUT_MILLISECONDS}. * * @param type the specific implementation of the {@link BatchedHasher} * @return the BatchedHasher instance */ public static BatchedHasher create(Type type) { return create(type, BatchedHasher.DEFAULT_BATCH_TIMEOUT_MILLISECONDS); } /** * Creates a new {@link BatchedHasher} instance. * * @param type the specific implementation of the {@link BatchedHasher} * @return the BatchedHasher instance */ public static BatchedHasher create(Type type, int batchTimeoutMilliSecs) { switch (type) { case BCTCURL81: return new BatchedBCTCurl(Curl.HASH_LENGTH, 81, batchTimeoutMilliSecs); case BCTCURL27: return new BatchedBCTCurl(Curl.HASH_LENGTH, 27, batchTimeoutMilliSecs); case FakeBatchedCURL81: return new FakeBatchedCurl(Curl.HASH_LENGTH, SpongeFactory.Mode.CURLP81); case FakeBatchedCURL27: return new FakeBatchedCurl(Curl.HASH_LENGTH, SpongeFactory.Mode.CURLP27); default: return null; } } }
1,709
30.666667
89
java
iri
iri-master/src/main/java/com/iota/iri/crypto/batched/FakeBatchedCurl.java
package com.iota.iri.crypto.batched; import com.iota.iri.crypto.Sponge; import com.iota.iri.crypto.SpongeFactory; /** * FakeBatchedCurl implements the {@link BatchedHasher} interface * but doesn't actually do any batching. The callbacks are called * within the thread which submits the hashing requests. */ public class FakeBatchedCurl implements BatchedHasher { private int hashLength; private Sponge spongeFunc; /** * Creates a new {@link FakeBatchedCurl} with the given * hash length and mode. * * @param hashLength the desired hash length * @param mode the mode of the sponge function to use */ public FakeBatchedCurl(int hashLength, SpongeFactory.Mode mode) { this.hashLength = hashLength; this.spongeFunc = SpongeFactory.create(mode); } @Override public void submitHashingRequest(HashRequest req) { spongeFunc.absorb(req.getInput(), 0, req.getInput().length); byte[] hashTrits = new byte[hashLength]; spongeFunc.squeeze(hashTrits, 0, hashLength); req.getCallback().process(hashTrits); spongeFunc.reset(); } @Override public void run() { // do nothing } }
1,215
27.952381
69
java
iri
iri-master/src/main/java/com/iota/iri/crypto/batched/HashRequest.java
package com.iota.iri.crypto.batched; /** * A HashRequest represents a request against a {@link BatchedHasher} to hash * something and execute the given callback up on completion of the hashing. */ public class HashRequest { private byte[] input; private HashRequestCallback callback; /** * Creates a new {@link HashRequest} with the given input and callback. * @param input the trits input to hash * @param callback the callback to fire up on completion */ public HashRequest(byte[] input, HashRequestCallback callback) { this.input = input; this.callback = callback; } /** * Gets the input of this {@link HashRequest}. * @return the input */ public byte[] getInput() { return input; } /** * Gets the callback of this {@link HashRequest}. * @return the callback */ public HashRequestCallback getCallback() { return callback; } }
963
24.368421
77
java
iri
iri-master/src/main/java/com/iota/iri/crypto/batched/HashRequestCallback.java
package com.iota.iri.crypto.batched; /** * A callback executed with the result of a {@link HashRequest}. */ public interface HashRequestCallback { /** * The callback which handles the result hash trits. * @param trits the result hash trits */ void process(byte[] trits); }
299
22.076923
64
java
iri
iri-master/src/main/java/com/iota/iri/model/AbstractHash.java
package com.iota.iri.model; import com.iota.iri.crypto.Curl; import com.iota.iri.model.persistables.Transaction; import com.iota.iri.model.safe.ByteSafe; import com.iota.iri.model.safe.TritSafe; import com.iota.iri.storage.Indexable; import com.iota.iri.utils.Converter; import java.io.Serializable; import java.util.Arrays; import java.util.Objects; /** * Base implementation of a hash object */ public abstract class AbstractHash implements Hash, Serializable { private final Object lock = new Object(); private ByteSafe byteSafe; private TritSafe tritSafe; /** * Empty Constructor for a placeholder hash identifier object. Creates a hash identifier object with no properties. */ public AbstractHash() { } /** * Constructor for a hash object using a trit or byte source array. A null safe byte array of the * same size as the source will be created, storing a copy of the input values in the object. * * @param source A byte array containing the source transaction information in either trit or byte format * @param sourceOffset The offset defining the start point for the hash object in the source * @param sourceSize The size of the hash object that will be created */ public AbstractHash(byte[] source, int sourceOffset, int sourceSize) { if(sourceSize < SIZE_IN_TRITS) { byte[] dest = new byte[SIZE_IN_BYTES]; System.arraycopy(source, sourceOffset, dest, 0, Math.min(dest.length, Math.min(source.length, sourceSize))); this.byteSafe = new ByteSafe(dest); } else { byte[] dest = new byte[SIZE_IN_TRITS]; System.arraycopy(source, sourceOffset, dest, 0, dest.length); this.tritSafe = new TritSafe(dest); } } /** * Assigns the input byte data to the hash object. Each hash object can only be initialized with data * once.If the byte or trit arrays are not null, an <tt>IllegalStateException</tt> is thrown. 
* * @param bytes A byte array containing the source bytes */ @Override public void read(byte[] bytes) { if (bytes != null) { synchronized (lock) { if (byteSafe != null || tritSafe != null) { throw new IllegalStateException("I cannot be initialized with data twice."); } byte[] dest = new byte[SIZE_IN_BYTES]; System.arraycopy(bytes, 0, dest, 0, Math.min(dest.length, bytes.length)); byteSafe = new ByteSafe(dest); } } } /** * Checks if the hash object is storing a byte array. If the object's byte array is <tt>null</tt>, * then a byte array conversion of the object's trit array will be created and stored. If there * is no trit array present, a <tt>NullPointerException</tt> will be thrown. * * @return The stored byte array containing the hash values */ @Override public byte[] bytes() { ByteSafe safe = byteSafe; if (safe == null) { synchronized (lock) { if (byteSafe == null) { Objects.requireNonNull(tritSafe, "I need my trits to be initialized in order to construct bytes."); byte[] src = trits(); byte[] dest = new byte[SIZE_IN_BYTES]; Converter.bytes(src, 0, dest, 0, src.length); byteSafe = new ByteSafe(dest); } safe = byteSafe; } } return safe.getData(); } /** * Checks if the hash object is storing a trit array. If the object's trit array is <tt>null</tt>, * then a trit array conversion of the object's byte array will be created. If there is no byte * array present, a <tt>NullPointerException</tt> will be thrown. 
* * @return The stored trit array containing the hash values */ @Override public byte[] trits() { TritSafe safe = tritSafe; if (safe == null) { synchronized (lock) { if (tritSafe == null) { Objects.requireNonNull(byteSafe, "I need my bytes to be initialized in order to construct trits."); byte[] src = bytes(); byte[] dest = new byte[Curl.HASH_LENGTH]; Converter.getTrits(src, dest); tritSafe = new TritSafe(dest); } safe = tritSafe; } } return safe.getData(); } /** * @return The number of zero value trits at the end of the hash object's trit array */ @Override public int trailingZeros() { byte[] trits = trits(); int index = SIZE_IN_TRITS; int zeros = 0; while (index-- > 0 && trits[index] == 0) { zeros++; } return zeros; } @Override public Indexable incremented() { return null; } @Override public Indexable decremented() { return null; } @Override public int hashCode() { bytes(); return byteSafe.getHashcode(); } @Override public String toString() { return Converter.trytes(trits()); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } Hash hash = (Hash) obj; return Arrays.equals(bytes(), hash.bytes()); } @Override public int compareTo(Indexable indexable) { Hash hash = (indexable instanceof Hash) ? (Hash) indexable : HashFactory.GENERIC.create(Transaction.class, indexable.bytes()); if (this.equals(hash)) { return 0; } long diff = Converter.longValue(hash.trits(), 0, SIZE_IN_TRITS) - Converter.longValue(trits(), 0, SIZE_IN_TRITS); if (Math.abs(diff) > Integer.MAX_VALUE) { return diff > 0L ? Integer.MAX_VALUE : Integer.MIN_VALUE + 1; } return (int) diff; } }
6,140
33.307263
134
java
iri
iri-master/src/main/java/com/iota/iri/model/AddressHash.java
package com.iota.iri.model; /** * The public <tt>Address</tt> hash identifier of a transaction * * <p> * An <tt>Address</tt> hash can represent the sending or receiving party of a transaction. The Address * hash can also be used to find specific associated transactions within the DB of a node using * the API. * </p> */ public class AddressHash extends AbstractHash { /** * Empty Constructor for an <tt>Address</tt> hash identifier object. Creates a placeholder <tt>Address</tt> hash * identifier object with no properties. */ public AddressHash() { } /** * Constructor for an <tt>Address</tt> hash identifier using a source array and starting point * * @param bytes The trit or byte array source that the object will be generated from * @param offset The starting point in the array for the beginning of the Address Hash object * @param sizeInBytes The size of the Address Hash object that is to be created */ protected AddressHash(byte[] bytes, int offset, int sizeInBytes) { super(bytes, offset, sizeInBytes); } }
1,115
33.875
116
java
iri
iri-master/src/main/java/com/iota/iri/model/BundleHash.java
package com.iota.iri.model;

/**
 * Public <tt>Bundle</tt> hash identifier of a transaction.
 *
 * <p>
 * A bundle is a collection of transactions attached to the tangle together;
 * every transaction placed in a bundle carries the bundle's hash. That hash
 * can be used to retrieve the transactions and confirm they belong to the
 * same batch. If a bundle is deemed invalid, every transaction it contains
 * is deemed invalid as well.
 * </p>
 */
public class BundleHash extends AbstractHash {

    /**
     * Creates a placeholder Bundle hash identifier with no properties set.
     */
    public BundleHash() {
    }

    /**
     * Builds a Bundle hash from a slice of the given source array.
     *
     * @param bytes The trit or byte array the hash is read from
     * @param offset The index in the source array where the Bundle hash data begins
     * @param sizeInBytes The size of the Bundle hash object to create
     */
    protected BundleHash(byte[] bytes, int offset, int sizeInBytes) {
        super(bytes, offset, sizeInBytes);
    }
}
1,295
38.272727
113
java
iri
iri-master/src/main/java/com/iota/iri/model/Hash.java
package com.iota.iri.model; import com.iota.iri.crypto.Curl; import com.iota.iri.storage.Indexable;

/**
 * Represents an indexable hash object.
 */
public interface Hash extends Indexable, HashId {

    /**
     * A null transaction hash built from a byte array of length {@value Curl#HASH_LENGTH}.
     * This is used as a reference hash for the genesis transaction.
     */
    Hash NULL_HASH = HashFactory.TRANSACTION.create(new byte[Curl.HASH_LENGTH]);

    /**
     * The size of a hash stored in a byte[] when the data structure is trits
     */
    int SIZE_IN_TRITS = 243;

    /**
     * The size of a hash stored in a byte[] when the data structure is bytes
     */
    int SIZE_IN_BYTES = 49;

    /**
     * The data of this hash in trits.
     *
     * @return the trits
     */
    byte[] trits();  // 'public' dropped: interface members are implicitly public

    /**
     * The amount of zeros this hash has on the end.
     * Defines the weightMagnitude for a transaction.
     *
     * @return the trailing zeros
     */
    int trailingZeros();
}
1,009
24.25
98
java
iri
iri-master/src/main/java/com/iota/iri/model/HashFactory.java
package com.iota.iri.model; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.iota.iri.model.persistables.Address; import com.iota.iri.model.persistables.Approvee; import com.iota.iri.model.persistables.Bundle; import com.iota.iri.model.persistables.ObsoleteTag; import com.iota.iri.model.persistables.Tag; import com.iota.iri.model.persistables.Transaction; import com.iota.iri.storage.Persistable; import com.iota.iri.utils.Converter; /** * Generates an appropriate Hash model from a set of source data. The available models that * can be instantiated include: * <ul> * <li>Transaction Hash</li> * <li>Address Hash</li> * <li>Bundle Hash</li> * <li>Tag Hash</li> * <li>Obsolete Tag Hash</li> * </ul> */ public enum HashFactory { TRANSACTION(Transaction.class), ADDRESS(Address.class), BUNDLE(Bundle.class), TAG(Tag.class), OBSOLETETAG(ObsoleteTag.class), /** * Creates from generic class, should be passed in the create() function. * Will return NULL_HASH if other functions are used */ GENERIC; private static final Logger log = LoggerFactory.getLogger(HashFactory.class); private Class<? extends Persistable> clazz; HashFactory(Class<? 
extends Persistable> clazz) { this.clazz = clazz; } HashFactory() { } /** * Creates a Hash using the provided trytes * @param trytes The source data as a string of trytes * @return The hash */ public Hash create(String trytes) { byte[] trits = new byte[Hash.SIZE_IN_TRITS]; Converter.trits(trytes, trits, 0); return create(clazz, trits, 0, Hash.SIZE_IN_TRITS); } /** * Creates a Hash using the provided source data * @param source The source data * @param sourceOffset The offset we start reading from * @param sourceSize The size this hash is in bytes, starting from offset * @return The hash */ public Hash create(byte[] source, int sourceOffset, int sourceSize) { return create(clazz, source, sourceOffset, sourceSize); } /** * Creates a Hash using the provided trits * @param trits The source data as an array of trits * @param sourceOffset The offset we start reading from * @return The hash */ public Hash create(byte[] trits, int sourceOffset) { return create(clazz, trits, sourceOffset, Hash.SIZE_IN_TRITS); } /** * Creates a Hash using the provided source. * Starts from the beginning, source size is based on source length * @param source The source data * @return The hash */ public Hash create(byte[] source) { return create(clazz, source, 0, source.length == Hash.SIZE_IN_TRITS ? Hash.SIZE_IN_TRITS : Hash.SIZE_IN_BYTES); } /** * Instantiates the creation of a specified Hash type. The source offset is set to 0. * * @param modelClass The model this Hash represents * @param source The source data, bytes or trits. Based on the length of source data * @return The hash of the correct type */ public Hash create(Class<?> modelClass, byte[] source) { return create(modelClass, source, 0, source.length == AbstractHash.SIZE_IN_TRITS ? AbstractHash.SIZE_IN_TRITS : AbstractHash.SIZE_IN_BYTES); } /** * Generates the specified hash type. 
* * @param modelClass The model this Hash represents * @param source The source data, bytes or trits * @param sourceOffset The offset in the source that the hash will be created from * @param sourceSize The size this hash is in bytes, starting from offset * @return The hash of the correct type */ public Hash create(Class<?> modelClass, byte[] source, int sourceOffset, int sourceSize) { //Transaction is first since its the most used if (modelClass.equals(Transaction.class) || modelClass.equals(Approvee.class)) { return new TransactionHash(source, sourceOffset, sourceSize); } else if (modelClass.equals(Address.class)) { return new AddressHash(source, sourceOffset, sourceSize); } else if (modelClass.equals(Bundle.class)) { return new BundleHash(source, sourceOffset, sourceSize); } else if (modelClass.equals(Tag.class)) { return new TagHash(source, sourceOffset, sourceSize); } else if (modelClass.equals(ObsoleteTag.class)) { return new ObsoleteTagHash(source, sourceOffset, sourceSize); } else { log.warn("Tried to construct hash from unknown class " + modelClass); //Default to transaction hash or NULL_HASH? return new TransactionHash(source, sourceOffset, sourceSize); } } }
4,874
34.326087
148
java
iri
iri-master/src/main/java/com/iota/iri/model/HashId.java
package com.iota.iri.model;

/**
 * An identifier reference for a transaction, address or bundle, exposing the
 * raw bytes of the referenced object.
 */
public interface HashId {

    /**
     * @return the bytes backing this Hash ID
     */
    byte[] bytes();
}
249
19.833333
84
java
iri
iri-master/src/main/java/com/iota/iri/model/HashPrefix.java
package com.iota.iri.model;

import com.iota.iri.crypto.Curl;
import com.iota.iri.utils.Converter;

import java.util.Arrays;

/**
 * A fixed-length hash identifier prefix holding a byte array of length
 * {@value PREFIX_LENGTH}.
 */
public final class HashPrefix implements HashId {
    public static final int PREFIX_LENGTH = 44;
    private final byte[] bytes;

    /**
     * Returns the given hash id itself when it already is a prefix; otherwise
     * creates a new prefix from the first {@value PREFIX_LENGTH} bytes of the
     * given hash id. A null input yields null.
     *
     * @param hashId The hash id a prefix is derived from
     * @return A prefix of the given hash id
     */
    public static HashPrefix createPrefix(HashId hashId) {
        if (hashId == null) {
            return null;
        }
        if (hashId instanceof HashPrefix) {
            return (HashPrefix) hashId;
        }
        return new HashPrefix(Arrays.copyOf(hashId.bytes(), PREFIX_LENGTH));
    }

    /**
     * Wraps the given byte array as a prefix without copying.
     *
     * @param bytes The source bytes
     */
    private HashPrefix(byte[] bytes) {
        this.bytes = bytes;
    }

    @Override
    public byte[] bytes() {
        return bytes;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        return Arrays.equals(bytes, ((HashPrefix) o).bytes);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(bytes);
    }

    @Override
    public String toString() {
        return trytes(bytes);
    }

    /**
     * Converts the given bytes to a tryte string.
     *
     * @param source The bytes to convert
     * @return The converted tryte string
     */
    private static String trytes(byte[] source) {
        byte[] trits = new byte[Curl.HASH_LENGTH];
        Converter.getTrits(source, trits);
        return Converter.trytes(trits);
    }
}
2,167
25.439024
107
java