2 changes: 1 addition & 1 deletion patchfinder/env.list
@@ -23,4 +23,4 @@ CLONE_PATH=nvip_data/patch-repos
PATCH_SRC_URL_PATH=nvip_data/source_dict.json

# --- FIX FINDER VARS ---
FF_INPUT_MODE=db
FF_INPUT_MODE=db
4 changes: 2 additions & 2 deletions patchfinder/pom.xml
@@ -9,8 +9,8 @@
<version>1.0</version>

<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>

7 changes: 3 additions & 4 deletions patchfinder/src/main/java/FixFinderMain.java
@@ -85,9 +85,7 @@ private void runDev() {
private void runDev() {
// Manually enter CVEs for development
List<String> cveIds = new ArrayList<>();
cveIds.add("CVE-2022-27911");
cveIds.add("CVE-2023-30367");
cveIds.add("CVE-2022-0847");
cveIds.add("CVE-2023-38571");

try {
FixFinder.run(cveIds);
@@ -97,6 +95,7 @@ private void runDev() {
}

public static void main(String[] args) {
// run();
FixFinderMain finder = new FixFinderMain();
finder.start();
}
}
2 changes: 2 additions & 0 deletions patchfinder/src/main/java/db/DatabaseHelper.java
@@ -467,4 +467,6 @@ public ArrayList<String> getCveSourcesNVD(String cve_id) {
}
return sourceURL;
}


}
10 changes: 6 additions & 4 deletions patchfinder/src/main/java/fixes/FixFinder.java
@@ -27,9 +27,10 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import env.FixFinderEnvVars;
import db.DatabaseHelper;
import fixes.urlfinders.CXSecurityUrlFinder;
import fixes.urlfinders.FixUrlFinder;
import fixes.urlfinders.NvdFixUrlFinder;
import fixes.urlfinders.VulnerabilityFixUrlFinder;
import fixes.urlfinders.NvdUrlFinder;
import fixes.urlfinders.VulnerabilityUrlFinder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

@@ -76,8 +77,9 @@ public static void init() {
logger.info("Initializing FixUrlFinders...");

// Add the instances to the fixURLFinders list
fixURLFinders.add(new VulnerabilityFixUrlFinder());
fixURLFinders.add(new NvdFixUrlFinder());
fixURLFinders.add(new VulnerabilityUrlFinder());
fixURLFinders.add(new NvdUrlFinder());
fixURLFinders.add(new CXSecurityUrlFinder());

logger.info("Done initializing {} FixUrlFinders: {}", fixURLFinders.size(), fixURLFinders);
}
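The hunk above registers three concrete finders behind the shared FixUrlFinder type imported earlier; how they are queried is not visible in this diff. A hedged sketch of the likely lookup step, where the getUrls(cveId) method name is an assumption rather than the project's confirmed API:

// Sketch only: FixUrlFinder's real interface is not shown in this PR, so getUrls(...) is a placeholder name.
final List<String> urls = new ArrayList<>();
for (FixUrlFinder finder : fixURLFinders) {
    // VulnerabilityUrlFinder, NvdUrlFinder and CXSecurityUrlFinder each contribute candidate fix page URLs
    urls.addAll(finder.getUrls(cveId));
}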
17 changes: 2 additions & 15 deletions patchfinder/src/main/java/fixes/FixFinderThread.java
@@ -25,13 +25,9 @@
*/

import fixes.parsers.FixParser;
import fixes.parsers.CISAParser;
import fixes.parsers.GenericParser;
import fixes.parsers.NVDParser;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
@@ -83,17 +79,8 @@ public void run() {

for (String url : urls) {
CompletableFuture<List<Fix>> future = CompletableFuture.supplyAsync(() -> {


try{
FixParser parser = FixParser.getParser(cveId, url);
return parser.parse();
} catch(IOException e){
logger.error("Error occurred while parsing url {} for CVE {}: {}", url, cveId, e.toString());
e.printStackTrace();
return null;
}

FixParser parser = FixParser.getParser(cveId, url);
return parser.parse();
});

futures.add(future);
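The visible part of run() only builds the futures list; the code that consumes it sits below the hunk. As a sketch of one common way to collect the asynchronous parser results (allFixes is a hypothetical accumulator; the project's actual consumption code may differ):

// Sketch only: drains the futures built above; not necessarily the project's exact code.
final List<Fix> allFixes = new ArrayList<>();
for (CompletableFuture<List<Fix>> future : futures) {
    final List<Fix> parsed = future.join(); // waits for FixParser.parse() to finish
    if (parsed != null) allFixes.addAll(parsed);
}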
19 changes: 19 additions & 0 deletions patchfinder/src/main/java/fixes/FixProcessor.java
@@ -0,0 +1,19 @@
package fixes;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

import java.io.IOException;
import java.net.URL;

public abstract class FixProcessor {
// Logger instance for FixProcessors
protected static final Logger logger = LogManager.getLogger();

// Utility method for getting DOM from string URL, throws IOException in case of an error
protected Document getDOM(String url) throws IOException {
return Jsoup.parse(new URL(url), 10000);
}
}
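FixProcessor gives the parsers (and, per the TODO in RedhatParser below, eventually the URL finders) a single shared jsoup entry point. A minimal sketch of a subclass using the inherited helper; ExampleProcessor is a made-up name for illustration:

import fixes.FixProcessor;
import org.jsoup.nodes.Document;
import java.io.IOException;

// Hypothetical subclass, mirroring how FixParser.parse() calls the inherited getDOM(...)
public class ExampleProcessor extends FixProcessor {
    public String fetchTitle(String url) {
        try {
            final Document dom = getDOM(url); // Jsoup.parse(new URL(url), 10000) under the hood
            return dom.title();
        } catch (IOException e) {
            logger.error("Failed to fetch {}: {}", url, e.toString());
            return null;
        }
    }
}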
6 changes: 1 addition & 5 deletions patchfinder/src/main/java/fixes/parsers/CISAParser.java
@@ -25,12 +25,8 @@
*/

import fixes.Fix;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
@@ -47,7 +43,7 @@ protected CISAParser(String cveId, String url){
}

@Override
protected List<Fix> parseWebPage() throws IOException {
protected List<Fix> parseWebPage() {
Elements headers = this.DOM.select("div[id=1-full__main]").first().select("h");

return this.fixes;
27 changes: 14 additions & 13 deletions patchfinder/src/main/java/fixes/parsers/FixParser.java
@@ -25,13 +25,10 @@
*/

import fixes.Fix;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jsoup.Jsoup;
import fixes.FixProcessor;
import org.jsoup.nodes.Document;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
@@ -42,8 +39,7 @@
* @author Paul Vickers
* @author Dylan Mulligan
*/
public abstract class FixParser {
protected final static Logger logger = LogManager.getLogger();
public abstract class FixParser extends FixProcessor {
protected final String cveId;
protected final String url;

@@ -62,7 +58,7 @@ public List<Fix> parse() {
// Attempt to parse page and store returned Document object
try {
logger.info("{} is parsing url '{}'...", getClass().getSimpleName(), url);
this.DOM = Jsoup.parse(new URL(url), 10000);
this.DOM = this.getDOM(this.url);
// Call abstract method implementation based on instance
this.parseWebPage();
}
@@ -81,7 +77,7 @@ public List<Fix> parse() {

//TODO: Remove this throws unless we really need it, as URL interaction has been
// moved to parse() and the IOExceptions are handled there
protected abstract List<Fix> parseWebPage() throws IOException;
protected abstract List<Fix> parseWebPage();

/**
* Delegation method to determine which parser should be used to find fixes from the given url.
@@ -91,9 +87,14 @@ public List<Fix> parse() {
* @return Correct parser to be used
*
*/
public static FixParser getParser(String cveId, String url) throws MalformedURLException {
public static FixParser getParser(String cveId, String url) {
// Objectify url for domain extraction
final URL urlObj = new URL(url);
URL urlObj = null;
try { urlObj = new URL(url); }
catch (Exception e) {
// This should not happen, as URL has already been validated
logger.error("Fatal error occurred: {}", e.toString());
}
// Extract domain
final String domain = urlObj.getHost();

@@ -102,9 +103,9 @@ public static FixParser getParser(String cveId, String url) throws MalformedURLE

// Choose parser based on domain
switch (domain) {
case "nvd.nist.gov":
parser = new NVDParser(cveId, url);
break;
// case "nvd.nist.gov":
// parser = new NVDParser(cveId, url);
// break;
case "cisa.gov":
parser = new CISAParser(cveId, url);
break;
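Taken together, getParser(...) and parse() give callers a two-line entry point, which is how FixFinderThread now uses the class. A usage sketch with a made-up advisory URL:

// Domain-based delegation: "cisa.gov" maps to CISAParser; other hosts presumably fall back to
// a default parser such as GenericParser (the default case sits below the visible hunk).
FixParser parser = FixParser.getParser("CVE-2023-38571", "https://cisa.gov/news-events/alerts/example");
List<Fix> fixes = parser.parse();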
71 changes: 66 additions & 5 deletions patchfinder/src/main/java/fixes/parsers/GenericParser.java
@@ -29,6 +29,7 @@
import org.jsoup.select.Elements;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;


@@ -50,6 +51,7 @@ private enum FIX_WORDS {
MITIGATION,
RESOLVE,
RESOLUTION;
// COUNTERMEASURE;

/**
* Determines if given word is a valid member of this enum (case-insensitive).
@@ -58,10 +60,58 @@ private enum FIX_WORDS {
* @return whether the word is a valid member of this enum
*/
public static boolean hasWord(String word) {
word = word.toUpperCase();
try {
FIX_WORDS.valueOf(word.toUpperCase());
FIX_WORDS.valueOf(word);
return true;
} catch (Exception ignored) { return false; }
} catch (Exception ignored) {
// If no direct match, check if word is plural and try singular form
final boolean endsWithES = word.endsWith("ES");
final boolean endsWithS = endsWithES || word.endsWith("S");

if(endsWithES || endsWithS) {
final int endIndex = endsWithES ? 2 : 1;
final String trimmedWord = word.substring(0, word.length() - endIndex);
try {
FIX_WORDS.valueOf(trimmedWord);
return true;
} catch (Exception ignored1) { }
}

// Return false if no match
return false;
}
}

private static List<String> stringValues() {
return Arrays.stream(values()).map(Enum::toString).toList();
}
}

private enum FIX_WORDS_BLACKLIST {
NO, NOT;

public static int containsKeywords(String text) {
text = text.toUpperCase();
int numKeywords = 0;

// Get string values for all blacklist keywords
for (String keyword : FIX_WORDS_BLACKLIST.stringValues()) {
// Pair each blacklist word with each fix word (looking for things like "no fix" or "not resolved")
for (String fixWord : FIX_WORDS.stringValues()) {
final String phrase = keyword + " " + fixWord;
if(text.contains(phrase)) numKeywords++;
// Check past tense/plural forms
// else if(!phrase.endsWith("D") && text.contains(phrase + "D")) numKeywords++;
// else if(!phrase.endsWith("ED") && text.contains(phrase + "ED")) numKeywords++;
}
}

return numKeywords;
}

private static List<String> stringValues() {
return Arrays.stream(values()).map(Enum::toString).toList();
}
}

@@ -84,8 +134,11 @@ protected List<Fix> parseWebPage() {

// Iterate over header objects
for (Element e : headerElements) {
// Check text and id of header for keywords
final List<String> words = new ArrayList<>(Arrays.asList(e.text().split(" ")));
words.add(e.id());
// Split text on spaces and check each word.
for (String headerWord : e.text().split(" ")) {
for (String headerWord : words) {
// Check if word is a member of FIX_WORDS (case-insensitive)
if(FIX_WORDS.hasWord(headerWord)) {
// Find and store description elements related to the current header
@@ -98,8 +151,8 @@ protected List<Fix> parseWebPage() {
final String fixDescription = String.join(" ", descriptionElements.eachText());

// If data was found, store in a new Fix object and add to list of found fixes
if(fixDescription.length() > 0)
this.fixes.add(new Fix(cveId, fixDescription.toString(), url));
if(fixDescription.length() > 0 && isFix(fixDescription))
this.fixes.add(new Fix(cveId, fixDescription, url));

// Skip to next header
break;
@@ -110,6 +163,14 @@ protected List<Fix> parseWebPage() {
return this.fixes;
}

private boolean isFix(String fixDescription) {
// Get number of words that are blacklisted (blacklist words imply not fixed)
final int numBlacklistWords = FIX_WORDS_BLACKLIST.containsKeywords(fixDescription);

// If we find none, it is likely a fix; one or more blacklisted phrases implies it is not a fix
return numBlacklistWords < 1;
}

private Elements findDescriptionElements(Element e) {
final Elements elements = new Elements();
// Attempt to get next sibling, store if found
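The keyword heuristic is easiest to see on concrete strings. Since FIX_WORDS and FIX_WORDS_BLACKLIST are private to GenericParser, the following standalone sketch mirrors their visible logic; only MITIGATION, RESOLVE and RESOLUTION appear in this hunk, so the word list here is a subset:

import java.util.List;

// Standalone illustration of the header-keyword check in GenericParser's FIX_WORDS.hasWord()
public class FixKeywordDemo {
    private static final List<String> FIX_WORDS = List.of("MITIGATION", "RESOLVE", "RESOLUTION");

    static boolean looksLikeFixHeader(String word) {
        word = word.toUpperCase();
        if (FIX_WORDS.contains(word)) return true;
        // Plural handling: strip a trailing "ES", otherwise a trailing "S", then retry
        final int trim = word.endsWith("ES") ? 2 : (word.endsWith("S") ? 1 : 0);
        return trim > 0 && FIX_WORDS.contains(word.substring(0, word.length() - trim));
    }

    public static void main(String[] args) {
        System.out.println(looksLikeFixHeader("Mitigations")); // true: "MITIGATIONS" trims to "MITIGATION"
        System.out.println(looksLikeFixHeader("Overview"));    // false: not a fix-related header
        // A description containing a phrase like "no fix" would then be rejected by isFix()
    }
}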
8 changes: 1 addition & 7 deletions patchfinder/src/main/java/fixes/parsers/NVDParser.java
@@ -25,14 +25,9 @@
*/

import fixes.Fix;
import fixes.FixFinderThread;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

@@ -78,10 +73,9 @@ protected NVDParser(String cveId, String url){
* scrape for the references table and then delegate to other parsers for those sources.
*
* @return List of fixes for the CVE
* @throws IOException if an error occurs during scraping
*/
@Override
public List<Fix> parseWebPage() throws IOException{
public List<Fix> parseWebPage() {
// Isolate the HTML for the references table
Elements rows = this.DOM.select("div[id=vulnHyperlinksPanel]").first().select("table").first().select("tbody").select("tr");

patchfinder/src/main/java/fixes/parsers/RedhatBugzillaParser.java
@@ -2,7 +2,6 @@

import fixes.Fix;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

@@ -13,7 +12,7 @@ protected RedhatBugzillaParser(String cveId, String url){


@Override
protected List<Fix> parseWebPage() throws IOException {
protected List<Fix> parseWebPage() {
List<Fix> newFixes = new ArrayList<>();

// TODO: Add Bugzilla specific implementation
4 changes: 2 additions & 2 deletions patchfinder/src/main/java/fixes/parsers/RedhatParser.java
@@ -40,15 +40,15 @@ protected RedhatParser(String cveId, String url){
super(cveId, url);
}

protected List<Fix> parseWebPage() throws IOException{
protected List<Fix> parseWebPage() {
throw new UnsupportedOperationException();
}

/**
* Delegates and parses the specified webpage using the RedHat Sub classes
* @return list of all found fixes
*/
@Override
@Override // TODO: Migrate to UrlFinder and make use of the new methods in FixProcessor/FixUrlFinder
public List<Fix> parse(){
// Init fixes list
this.fixes = new ArrayList<>();