diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c65012f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,12 @@
+.dcignore
+.idea
+*.iml
+
+dev/
+
+# Maven
+target/
+pom.xml.tag
+pom.xml.releaseBackup
+pom.xml.versionsBackup
+release.properties
diff --git a/pom.xml b/pom.xml
index 45de826..f3959e6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -79,6 +79,10 @@
io.quarkus
quarkus-arc
+
+ io.quarkus
+ quarkus-agroal
+
io.quarkus
quarkus-junit5
@@ -111,6 +115,13 @@
3.0.1
+
+
+ jboss
+ JBoss repository
+ https://repository.jboss.org/maven2
+
+
diff --git a/schema/.gitkeep b/schema/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/schema/OffRegScraper.py b/schema/OffRegScraper.py
new file mode 100644
index 0000000..1107d30
--- /dev/null
+++ b/schema/OffRegScraper.py
@@ -0,0 +1,30 @@
+from bs4 import BeautifulSoup
+import requests
+import re
+import csv
+
+results = {}
+
+URL = "https://product-security.pages.redhat.com/offering-registry/"
+r = requests.get(URL)
+
+soup = BeautifulSoup(r.text, 'html.parser')
+table = soup.find("table")
+rows = table.find_all("tr")
+
+for row in rows:
+    cell = row.contents[1] if len(row.contents) > 1 else None
+    # Skip malformed rows and the header row labelled 'Offering'
+    if cell is None or cell.text == 'Offering':
+        continue
+    link = cell.contents[0]
+    # Extract the offering short name from the link URL
+    re_search = re.search('/offering-registry/offerings/(.*)/', link.attrs["href"])
+    results[re_search.group(1)] = link.text
+
+print(results)
+
+with open('offerings.csv', 'w', newline='') as csv_file:
+ writer = csv.writer(csv_file)
+ for key, value in results.items():
+ writer.writerow([key, value])
diff --git a/schema/populate.sql b/schema/populate.sql
new file mode 100644
index 0000000..d2f5584
--- /dev/null
+++ b/schema/populate.sql
@@ -0,0 +1,126 @@
+INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-automation-platform','Ansible Automation Platform (AAP)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('advisor','Insights Advisor');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-on-aws','Ansible on AWS');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-on-azure','Ansible on Azure');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-on-gcp','Ansible on GCP');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-wisdom-service','Ansible Wisdom Service');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('cert-manager','cert-manager Operator for Red Hat OpenShift');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('compliance','Insights Compliance');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('connected-customer-experience','Connected Customer Experience (CCX)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('cost-management','Cost Management');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('custom-metric-autoscaler','OpenShift Custom Metrics Autoscaler');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('developer-sandbox-for-red-hat-openshift','Developer Sandbox for Red Hat OpenShift');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('dotnet','.NET');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('drift','Insights Drift');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('eclipse-vertx','Red Hat build of Eclipse Vert.x');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('edge-management','Edge Management');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('eventing','Insights Eventing');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('fastdatapath','RHEL Fast Datapath');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('host-management-services','Host Management Services');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('hosted-control-planes','Hosted Control Planes (Hypershift)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('hybrid-application-console','Hybrid Application Console (HAC)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('insights-essential','Insights Essentials');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('kernel-module-management','Kernel Module Management');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('logging-subsystem-for-red-hat-openshift','Logging Subsystem for Red Hat OpenShift');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('lvms-operator','LVMS Operator');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('malware-detection','Insights Malware Detection');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('mgmt-platform','Management Platform');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-applications','Migration Toolkit for Applications (MTA)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-containers','Migration Toolkit for Containers (MTC)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-runtimes','Migration Toolkit for Runtimes (MTR)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-virtualization','Migration Toolkit for Virtualization (MTV)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('network-observability-operator','Network Observability Operator');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('node-healthcheck-operator','Node HealthCheck Operator');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('node-maintenance-operator','Node Maintenance Operator');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('nvidia-gpu-add-on','NVIDIA GPU Add-On');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('oadp','OpenShift API for Data Protection');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-container-platform','Openshift Container Platform (OCP)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-container-storage','OpenShift Container Storage (OCS)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-data-foundation-managed-service','Red Hat OpenShift Data Foundation Managed Service');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-dedicated','OpenShift Dedicated (OSD/ROSA)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-developer-tools-and-services-helm','OpenShift Developer Tools and Services (Helm)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-developer-tools-and-services-jenkins','OpenShift Developer Tools and Services (Jenkins)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-distributed-tracing','OpenShift Distributed Tracing');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-on-azure','Openshift on Azure (ARO)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-run-once-duration-override-operator','OpenShift Run Once Duration Override Operator');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-sandboxed-containers','Openshift Sandboxed Containers');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-secondary-scheduler-operator','OpenShift Secondary Scheduler Operator');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-servicemesh','OpenShift Service Mesh');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-virtualization','OpenShift Virtualization (CNV)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-web-terminal-operator','OpenShift Web Terminal Operator');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-winc','Windows Container Support for OpenShift');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('patch','Insights Patch');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('product-discovery','Product Discovery');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-3scale-api-management-platform','Red Hat 3scale API Management Platform');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-advanced-cluster-management','Red Hat Advanced Cluster Management (RHACM)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-broker','Red Hat AMQ Broker');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-clients','Red Hat AMQ Clients');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-interconnect','Red Hat AMQ Interconnect');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-online','Red Hat AMQ Online');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-streams','Red Hat AMQ Streams');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-build-apicurio-registry','Red Hat build of Apicurio Registry (formerly known as Integration Service Registry)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-build-quarkus','Red Hat Build of Quarkus');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-camel-extensions-quarkus','Red Hat Camel Extensions for Quarkus');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-camel-k','Red Hat Camel K');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-camel-spring-boot','Red Hat Camel for Spring Boot');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-ceph-storage','Red Hat Ceph Storage');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-certificate-system','Red Hat Certificate System (RHCS)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-certification-program','Red Hat Certification Program (rhcertification)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-code-quarkus','Red Hat Code Quarkus');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-core-os','Red Hat CoreOS');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-data-grid','Red Hat Data Grid');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-debezium','Red Hat Debezium');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-decision-manager','Red Hat Decision Manager');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-developer-hub','Red Hat Developer Hub');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-developer-toolset','Red Hat Developer Toolset (DTS)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-devtools-compilers','Red Hat Developer Tools (DevTools Compilers)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-directory-server','Red Hat Directory Server (RHDS)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-10','Red Hat Enterprise Linux (RHEL) 10');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-6','Red Hat Enterprise Linux (RHEL) 6');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-7','Red Hat Enterprise Linux (RHEL) 7');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-8','Red Hat Enterprise Linux (RHEL) 8');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-9','Red Hat Enterprise Linux (RHEL) 9');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-fuse','Red Hat Fuse');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-gluster-storage','Red Hat Gluster Storage');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-in-vehicle-os','Red Hat In-Vehicle Operating System (RHIVOS)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-jboss-core-services','Red Hat JBoss Core Services');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-jboss-eap','Red Hat JBoss Enterprise Application Platform (EAP)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-jboss-web-server','Red Hat JBoss Web Server');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-observability-service','Red Hat Observability Service');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-open-database-access','Red Hat OpenShift Database Access');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-open-shift-data-science','Red Hat OpenShift Data Science (RHODS)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openjdk','Red Hat OpenJDK');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-api-management','Red Hat OpenShift API Management');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-builds-v2','Red Hat OpenShift Builds V2');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-connectors','Red Hat OpenShift Connectors (RHOC)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-control-plane-service','Red Hat OpenShift Control Plane Service');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-data-foundation','Red Hat OpenShift Data Foundation');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-dev-spaces','Red Hat OpenShift Dev Spaces');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-gitops','Red Hat OpenShift GitOps');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-local','Red Hat OpenShift Local');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-pipelines','Red Hat OpenShift Pipelines');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-serverless','Red Hat OpenShift Serverless');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-service-registry','Red Hat OpenShift Service Registry');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-streams-apache-kafka','Red Hat OpenShift Streams for Apache Kafka (RHOSAK)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openstack-platform','Red Hat OpenStack Platform (RHOSP)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-optaplanner','Red Hat Optaplanner');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-plug-ins-for-backstage','Red Hat Plug-ins for Backstage');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-process-automation-manager','Red Hat Process Automation Manager');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-quarkus-registry','Red Hat Quarkus Registry');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-quay','Red Hat Quay');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-satellite','Red Hat Satellite');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-service-interconnect','Red Hat Service Interconnect (formerly known as Application Interconnect)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-single-sign-on','Red Hat Single Sign-On (RHSSO)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-software-collections','Red Hat Software Collections');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-support-for-spring-boot','Red Hat support for Spring Boot');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-trusted-application-pipeline','Red Hat Trusted Application Pipeline (RHTAP)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-update-infrastructure','Red Hat Update Infrastructure (RHUI)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-virtualization','Red Hat Virtualization');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('resource-optimization','Insights Resource Optimization (ROS)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('rh-vulnerability-for-ocp','Insights Vulnerability for OCP');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('rhacs','Red Hat Advanced Cluster Security for Kubernetes (RHACS)');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('self-node-remediation','Self Node Remediation');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('subscription-central','Subscription Central');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('subscription-watch','Subscription Watch');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('telco-sw-components','Telco SW Components');
+INSERT INTO osh.offerings(offering_id,description) VALUES ('vulnerability','Vulnerability');
diff --git a/schema/schema.sql b/schema/schema.sql
new file mode 100644
index 0000000..f817daa
--- /dev/null
+++ b/schema/schema.sql
@@ -0,0 +1,81 @@
+CREATE SCHEMA osh;
+
+GRANT USAGE ON SCHEMA osh TO postgres;
+
+CREATE TABLE IF NOT EXISTS osh.offerings(
+ offering_id VARCHAR(100),
+ description VARCHAR(200),
+ PRIMARY KEY (offering_id)
+);
+
+CREATE TABLE IF NOT EXISTS osh.results(
+ results_id SERIAL,
+ datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL,
+ state BOOLEAN,
+ logs bytea,
+ task_reference VARCHAR(50),
+ PRIMARY KEY (results_id)
+);
+
+
+CREATE TABLE IF NOT EXISTS osh.scans(
+ scan_id SERIAL,
+ offering_id VARCHAR(100),
+ event_id VARCHAR(100) NOT NULL,
+ is_managed_service BOOLEAN NOT NULL,
+ component_list VARCHAR(100),
+ datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL,
+ owner VARCHAR(50) NOT NULL,
+ results SERIAL,
+ status VARCHAR (50) CONSTRAINT valid_status CHECK(status in ('PENDING', 'DELETED', 'COMPLETED', 'IN PROGRESS')),
+ last_updated TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL,
+ PRIMARY KEY(scan_id),
+ FOREIGN KEY (offering_id) REFERENCES osh.offerings(offering_id),
+ FOREIGN KEY (results) REFERENCES osh.results(results_id)
+);
+
+CREATE TABLE IF NOT EXISTS osh.archive(
+ scan_id SERIAL,
+ offering_id VARCHAR(100),
+ event_id VARCHAR(100) NOT NULL,
+ is_managed_service BOOLEAN NOT NULL,
+ component_list VARCHAR(100),
+ datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL,
+ owner VARCHAR(50) NOT NULL,
+ results SERIAL,
+ status VARCHAR (50) CONSTRAINT valid_status CHECK(status in ('PENDING', 'DELETED', 'COMPLETED', 'IN PROGRESS')),
+ last_updated TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL,
+ PRIMARY KEY(scan_id),
+ FOREIGN KEY (offering_id) REFERENCES osh.offerings(offering_id),
+ FOREIGN KEY (results) REFERENCES osh.results(results_id)
+);
+
+CREATE TABLE IF NOT EXISTS osh.gitscans (
+ id SERIAL,
+ build_system_type VARCHAR(80),
+ repository VARCHAR(150),
+ reference VARCHAR(100),
+ commit_id VARCHAR(100),
+ -- SHA256 has a length of 256 bits, so 256 bits would represent 64 hex characters
+ hashsum VARCHAR(64),
+ PRIMARY KEY(id)
+);
+
+CREATE TABLE IF NOT EXISTS osh.pncscans(
+ id SERIAL,
+ build_system_type VARCHAR(80),
+ build_id VARCHAR(100),
+ PRIMARY KEY(id)
+);
+
+CREATE TABLE IF NOT EXISTS osh.brewscans(
+ id SERIAL,
+ build_system_type VARCHAR(80),
+ brew_id VARCHAR(100),
+ brew_nvr VARCHAR(100),
+ pnc_id VARCHAR(100),
+ artifact_type VARCHAR(100),
+ file_name VARCHAR(100),
+ built_from_source BOOLEAN,
+ PRIMARY KEY(id)
+);
diff --git a/src/main/java/dto/BrewObj.java b/src/main/java/dto/BrewObj.java
index a7136c4..0497751 100644
--- a/src/main/java/dto/BrewObj.java
+++ b/src/main/java/dto/BrewObj.java
@@ -8,17 +8,22 @@ import lombok.extern.jackson.Jacksonized;
import java.io.Serializable;
-@ToString
-@Getter
@AllArgsConstructor
-@Jacksonized
@Builder
+@Getter
+@ToString
+@Jacksonized
public class BrewObj implements Serializable {
- public String buildSystemType;
- public String brewId;
- public String brewNvr;
- public String pncId;
- public String artifactType;
- public String fileName;
- public String buildFromSource;
-}
\ No newline at end of file
+
+ public static final String SQL = "INSERT INTO osh.brewscans " +
+ "(build_system_type, brew_id, brew_nvr, pnc_id, artifact_type, file_name, built_from_source)" +
+ " VALUES (?, ?, ?, ?, ?, ?, ?)";
+
+ private final String buildSystemType;
+ private final String brewId;
+ private final String brewNvr;
+ private final String pncId;
+ private final String artifactType;
+ private final String fileName;
+ private final Boolean builtFromSource;
+}
diff --git a/src/main/java/dto/BrewObjPayload.java b/src/main/java/dto/BrewObjPayload.java
index 0a0709f..07f41ff 100644
--- a/src/main/java/dto/BrewObjPayload.java
+++ b/src/main/java/dto/BrewObjPayload.java
@@ -1,15 +1,20 @@
package dto;
-// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload;
-// import org.jboss.pnc.api.dto.HeartbeatConfig;
-// import org.jboss.pnc.api.dto.Request;
-
+import org.json.JSONException;
import org.json.JSONObject;
-import java.net.URISyntaxException;
-
public class BrewObjPayload {
- public static BrewObj constructScanPayload(JSONObject brewObj) throws URISyntaxException {
- return new BrewObj(brewObj.getString("buildSystemType"),brewObj.getString("brewId"),brewObj.getString("brewNvr"),brewObj.getString("pncId"),brewObj.getString("artifactType"),brewObj.getString("fileName"),brewObj.getString("builtFromSource"));
+
+ public static BrewObj constructScanPayload(JSONObject jsonObj) throws JSONException {
+ return new BrewObj(
+ jsonObj.getString("build_system_type"),
+ jsonObj.getString("brew_id"),
+ jsonObj.getString("brew_nvr"),
+ jsonObj.getString("pnc_id"),
+ jsonObj.getString("artifact_type"),
+ jsonObj.getString("file_name"),
+ jsonObj.getBoolean("built_from_source"));
}
-}
\ No newline at end of file
+
+ private BrewObjPayload() {}
+}
diff --git a/src/main/java/dto/ConnectDB.java b/src/main/java/dto/ConnectDB.java
index 2080def..1944770 100644
--- a/src/main/java/dto/ConnectDB.java
+++ b/src/main/java/dto/ConnectDB.java
@@ -1,31 +1,25 @@
package dto;
+import org.json.JSONException;
+
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import static constants.PSGQL.*;
+// @TODO Replace hard-coded credentials; make use of our secure db connection practice
-public class ConnectDB{
- // private final String url = "jdbc:postgresql://localhost:5432/scandb";
- // private final String user = "postgres";
- // private final String password = "password";
+public class ConnectDB {
- /**
- * Connect to the PostgreSQL database
- *
- * @return a Connection object
- */
- public Connection connect() {
- Connection conn = null;
+ public Connection connect() {
try {
- conn = DriverManager.getConnection(url, user, password);
- System.out.println("Connected to the PostgreSQL server successfully.");
+ Connection conn = DriverManager.getConnection(url, user, password);
+ System.out.println("Connected to PostgreSQL server");
+ return conn;
} catch (SQLException e) {
System.out.println(e.getMessage());
}
-
- return conn;
+ return null;
}
}
diff --git a/src/main/java/dto/GitObj.java b/src/main/java/dto/GitObj.java
index 435ec0a..81f8d3f 100644
--- a/src/main/java/dto/GitObj.java
+++ b/src/main/java/dto/GitObj.java
@@ -5,17 +5,21 @@ import lombok.Builder;
import lombok.Getter;
import lombok.ToString;
import lombok.extern.jackson.Jacksonized;
-
import java.io.Serializable;
-@ToString
-@Getter
@AllArgsConstructor
-@Jacksonized
@Builder
+@Getter
+@ToString
+@Jacksonized
public class GitObj implements Serializable {
- public String buildSystemType;
- public String repository;
- public String reference;
- public String commitId;
+
+ public static final String SQL = "INSERT INTO osh.gitscans " +
+ "(build_system_type, repository, reference, commit_id)" +
+ " VALUES (?, ?, ?, ?)";
+
+ private final String buildSystemType;
+ private final String repository;
+ private final String reference;
+ private final String commitId;
}
\ No newline at end of file
diff --git a/src/main/java/dto/GitObjPayload.java b/src/main/java/dto/GitObjPayload.java
index 8d2561f..eaabab1 100644
--- a/src/main/java/dto/GitObjPayload.java
+++ b/src/main/java/dto/GitObjPayload.java
@@ -1,15 +1,17 @@
package dto;
-// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload;
-// import org.jboss.pnc.api.dto.HeartbeatConfig;
-// import org.jboss.pnc.api.dto.Request;
-
+import org.json.JSONException;
import org.json.JSONObject;
-import java.net.URISyntaxException;
-
public class GitObjPayload {
- public static GitObj constructScanPayload(JSONObject gitObj) throws URISyntaxException {
- return new GitObj(gitObj.getString("buildSystemType"),gitObj.getString("repository"),gitObj.getString("reference"),gitObj.getString("commitId"));
+
+ public static GitObj constructScanPayload(JSONObject jsonObj) throws JSONException {
+ return new GitObj(
+ jsonObj.getString("build_system_type"),
+ jsonObj.getString("repository"),
+ jsonObj.getString("reference"),
+ jsonObj.getString("commit_id"));
}
-}
\ No newline at end of file
+
+ private GitObjPayload() {}
+}
diff --git a/src/main/java/dto/PncObj.java b/src/main/java/dto/PncObj.java
index 7ce1a1a..14b9f7d 100644
--- a/src/main/java/dto/PncObj.java
+++ b/src/main/java/dto/PncObj.java
@@ -5,15 +5,17 @@ import lombok.Builder;
import lombok.Getter;
import lombok.ToString;
import lombok.extern.jackson.Jacksonized;
-
import java.io.Serializable;
-@ToString
-@Getter
@AllArgsConstructor
-@Jacksonized
@Builder
+@Getter
+@ToString
+@Jacksonized
public class PncObj implements Serializable {
- public String buildSystemType;
- public String buildId;
+
+ public static final String SQL = "INSERT INTO osh.pncscans (build_system_type, build_id) VALUES (?, ?)";
+
+ private final String buildSystemType;
+ private final String buildId;
}
\ No newline at end of file
diff --git a/src/main/java/dto/PncObjPayload.java b/src/main/java/dto/PncObjPayload.java
index 3f83508..a8f313c 100644
--- a/src/main/java/dto/PncObjPayload.java
+++ b/src/main/java/dto/PncObjPayload.java
@@ -1,15 +1,15 @@
package dto;
-// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload;
-// import org.jboss.pnc.api.dto.HeartbeatConfig;
-// import org.jboss.pnc.api.dto.Request;
-
+import org.json.JSONException;
import org.json.JSONObject;
-import java.net.URISyntaxException;
-
public class PncObjPayload {
- public static PncObj constructScanPayload(JSONObject pncObj) throws URISyntaxException {
- return new PncObj(pncObj.getString("buildSystemType"),pncObj.getString("buildId"));
+
+ public static PncObj constructScanPayload(JSONObject jsonObj) throws JSONException {
+ return new PncObj(
+ jsonObj.getString("build_system_type"),
+ jsonObj.getString("build_id"));
}
-}
\ No newline at end of file
+
+ private PncObjPayload() {}
+}
diff --git a/src/main/java/dto/ScanObj.java b/src/main/java/dto/ScanObj.java
index a8d835b..fe0821e 100644
--- a/src/main/java/dto/ScanObj.java
+++ b/src/main/java/dto/ScanObj.java
@@ -5,19 +5,23 @@ import lombok.Builder;
import lombok.Getter;
import lombok.ToString;
import lombok.extern.jackson.Jacksonized;
import java.io.Serializable;
-@ToString
-@Getter
@AllArgsConstructor
-@Jacksonized
@Builder
+@Getter
+@ToString
+@Jacksonized
public class ScanObj implements Serializable {
- public String scanId;
- public String productId;
- public String eventId;
- public String isManagedService;
- public String componentList;
+ public static final String SQL = "INSERT INTO osh.scans " +
+ "(scan_id, offering_id, event_id, is_managed_service, component_list) " +
+ "VALUES (?, ?, ?, ?, ?)";
+
+ private final String scanId;
+ private final String productId;
+ private final String eventId;
+ private final String isManagedService;
+ private final String componentList;
}
\ No newline at end of file
diff --git a/src/main/java/dto/ScanObjPayload.java b/src/main/java/dto/ScanObjPayload.java
index b44c92f..a914cc4 100644
--- a/src/main/java/dto/ScanObjPayload.java
+++ b/src/main/java/dto/ScanObjPayload.java
@@ -1,15 +1,17 @@
package dto;
-// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload;
-// import org.jboss.pnc.api.dto.HeartbeatConfig;
-// import org.jboss.pnc.api.dto.Request;
-
+import org.json.JSONException;
import org.json.JSONObject;
-import java.net.URISyntaxException;
-
public class ScanObjPayload {
- public static ScanObj constructScanPayload(JSONObject scanObj) throws URISyntaxException {
- return new ScanObj(scanObj.getString("scanId"),scanObj.getString("productId"),scanObj.getString("eventId"),scanObj.getString("isManagedService"),scanObj.getString("componentList"));
+ public static ScanObj constructScanPayload(JSONObject jsonObj) throws JSONException {
+ return new ScanObj(
+ jsonObj.getString("scan_id"),
+ jsonObj.getString("offering_id"),
+ jsonObj.getString("event_id"),
+ jsonObj.getString("is_managed_service"),
+ jsonObj.getString("component_list"));
}
-}
\ No newline at end of file
+
+ private ScanObjPayload() {}
+}
diff --git a/src/main/java/rest/CreateGetResource.java b/src/main/java/rest/CreateGetResource.java
index 2c1c6bf..67d9bea 100644
--- a/src/main/java/rest/CreateGetResource.java
+++ b/src/main/java/rest/CreateGetResource.java
@@ -1,62 +1,60 @@
package rest;
-import dto.ConnectDB;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.Set;
+
import dto.ScanObj;
-import io.quarkiverse.kerberos.KerberosPrincipal;
import io.quarkus.security.Authenticated;
-import io.quarkus.security.identity.SecurityIdentity;
+import dto.ConnectDB;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import java.sql.Connection;
+import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.Set;
-
-
-// import org.hibernate.EntityManager;
-
// @Path("/api/v1/[osh-scan]")
@Path("/scanGet")
@Authenticated
public class CreateGetResource {
+ private static final Logger logger = LoggerFactory.getLogger(CreateGetResource.class);
CreateScanService createScanService;
private Set Scans = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<>()));
-
public CreateGetResource() {
-
+ // LDB: @TODO either put some code here or remove this not used public constructor
}
@GET
@Path("/{scanId}")
public Set list(@PathParam("scanId") String scanId) {
//use to return specific scanIds just use usual fetch from sets, will be querying hte db directly here
- try {
- ConnectDB connectDB = new ConnectDB();
- Connection conn = connectDB.connect();
- Statement stmt = null;
- String sql = "SELECT * FROM scans WHERE scanid=" +scanId;
- stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
-
+ ConnectDB connectDB = new ConnectDB();
+ String sql = "SELECT * FROM osh.scans WHERE scan_id=?";
+ try(Connection conn = connectDB.connect();
+ PreparedStatement pstmt = conn.prepareStatement(sql)) {
+ pstmt.setString(1, scanId);
+ ResultSet rs = pstmt.executeQuery();
while (rs.next()) {
//very ugly solution needs some change to where we put the query
- Scans.add(new ScanObj(rs.getString("scanid"),rs.getString("productid"),rs.getString("eventid"),rs.getString("ismanagedservice"),rs.getString("componentlist")));
- conn.close();
+ Scans.add(new ScanObj(
+ rs.getString("scan_id"),
+ rs.getString("offering_id"),
+ rs.getString("event_id"),
+ rs.getString("is_managed_service"),
+ rs.getString("component_list")));
}
- } catch (SQLException e){
- System.out.println(e);
+ } catch (SQLException e) {
+ logger.error(e.getMessage());
}
return Scans;
}
-}
\ No newline at end of file
+}
diff --git a/src/main/java/rest/CreateScanRequest.java b/src/main/java/rest/CreateScanRequest.java
index 6d0a833..85327c2 100644
--- a/src/main/java/rest/CreateScanRequest.java
+++ b/src/main/java/rest/CreateScanRequest.java
@@ -1,46 +1,56 @@
package rest;
-import dto.*;
-import io.quarkus.security.Authenticated;
+import dto.BrewObj;
+import dto.ConnectDB;
+import dto.BrewObjPayload;
+import dto.GitObj;
+import dto.GitObjPayload;
+import dto.PncObj;
+import dto.PncObjPayload;
+import io.quarkus.security.Authenticated;
import org.eclipse.microprofile.rest.client.inject.RestClient;
+import org.json.JSONException;
import org.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import javax.validation.Valid;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
-import java.net.URISyntaxException;
import java.sql.Connection;
-import java.sql.ResultSet;
+import java.sql.PreparedStatement;
import java.sql.SQLException;
-import java.sql.Statement;
@Authenticated
@Path("/scanRequest")
public class CreateScanRequest {
- //all of these need cleaning up to be a more sensible solution
+ private static final Logger logger = LoggerFactory.getLogger(CreateScanRequest.class);
+
@RestClient
CreateScanService createScanService;
@POST
@Path("/brew")
@Consumes({ "application/json" })
- //in theory should take List to clean it up
- public BrewObj invokeScanAnalyze(@Valid String scanInvocation) throws URISyntaxException {
+ // in theory should take List to clean it up
+ public BrewObj invokeBrewScanAnalyze(@Valid String scanInvocation) throws JSONException {
JSONObject jsonData = new JSONObject(scanInvocation);
BrewObj brewObj = BrewObjPayload.constructScanPayload(jsonData);
-
ConnectDB connectDB = new ConnectDB();
- Connection conn = connectDB.connect();
- Statement stmt = null;
- String sql = "INSERT INTO brewscans (buildsystemtype, brewid, brewnvr, pncid, artifacttype, filename, builtfromsource) VALUES ('"+brewObj.buildSystemType+"','"+brewObj.brewId+"','"+brewObj.brewNvr+"','"+brewObj.pncId+"','"+brewObj.artifactType+"','"+brewObj.fileName+"','"+brewObj.buildFromSource+"')";
- try{
- stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- conn.close();
- } catch (SQLException e){
- System.out.println(e);
+ try(Connection conn = connectDB.connect();
+ PreparedStatement pstmt = conn.prepareStatement(BrewObj.SQL)) {
+ pstmt.setString(1, brewObj.getBuildSystemType());
+ pstmt.setString(2, brewObj.getBrewId());
+ pstmt.setString(3, brewObj.getBrewNvr());
+ pstmt.setString(4, brewObj.getPncId());
+ pstmt.setString(5, brewObj.getArtifactType());
+ pstmt.setString(6, brewObj.getFileName());
+ pstmt.setBoolean(7, brewObj.getBuiltFromSource());
+ pstmt.executeUpdate();
+ } catch (SQLException e) {
+ logger.error(e.getMessage());
}
return brewObj;
}
@@ -48,42 +58,38 @@ public class CreateScanRequest {
@POST
@Path("/git")
@Consumes({ "application/json" })
- public GitObj invokeGitScanAnalyze(@Valid String scanInvocation)throws URISyntaxException {
+ public GitObj invokeGitScanAnalyze(@Valid String scanInvocation)throws JSONException {
JSONObject jsonData = new JSONObject(scanInvocation);
GitObj gitObj = GitObjPayload.constructScanPayload(jsonData);
-
ConnectDB connectDB = new ConnectDB();
- Connection conn = connectDB.connect();
- Statement stmt = null;
- String sql = "INSERT INTO gitscans (buildsystemtype, repository, reference, commitid) VALUES ('"+gitObj.buildSystemType+"','"+gitObj.repository+"','"+gitObj.reference+"','"+gitObj.commitId+"')";
- try{
- stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- conn.close();
- } catch (SQLException e){
- System.out.println(e);
- }
+ try(Connection conn = connectDB.connect();
+ PreparedStatement pstmt = conn.prepareStatement(GitObj.SQL)) {
+ pstmt.setString(1, gitObj.getBuildSystemType());
+ pstmt.setString(2, gitObj.getRepository());
+ pstmt.setString(3, gitObj.getReference());
+ pstmt.setString(4, gitObj.getCommitId());
+ pstmt.executeUpdate();
+ } catch (SQLException e) {
+ logger.error(e.getMessage());
+ }
return gitObj;
}
@POST
@Path("/pnc")
@Consumes({ "application/json" })
- public PncObj invokePncScanAnalyze(@Valid String scanInvocation)throws URISyntaxException {
+ public PncObj invokePncScanAnalyze(@Valid String scanInvocation)throws JSONException {
JSONObject jsonData = new JSONObject(scanInvocation);
PncObj pncObj = PncObjPayload.constructScanPayload(jsonData);
-
ConnectDB connectDB = new ConnectDB();
- Connection conn = connectDB.connect();
- Statement stmt = null;
- String sql = "INSERT INTO pncscans (buildsystemtype, buildid) VALUES ('"+pncObj.buildSystemType+"','"+pncObj.buildId+"')";
- try{
- stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- conn.close();
- } catch (SQLException e){
- System.out.println(e);
- }
+ try(Connection conn = connectDB.connect();
+ PreparedStatement pstmt = conn.prepareStatement(PncObj.SQL)) {
+ pstmt.setString(1, pncObj.getBuildSystemType());
+ pstmt.setString(2, pncObj.getBuildId());
+ pstmt.executeUpdate();
+ } catch (SQLException e) {
+ logger.error(e.getMessage());
+ }
return pncObj;
}
}
diff --git a/src/main/java/rest/CreateScanResource.java b/src/main/java/rest/CreateScanResource.java
index 417ccea..35564d7 100644
--- a/src/main/java/rest/CreateScanResource.java
+++ b/src/main/java/rest/CreateScanResource.java
@@ -1,44 +1,52 @@
package rest;
import dto.ConnectDB;
+import dto.ScanObjPayload;
import dto.ScanObj;
import dto.ScanObjPayload;
import org.eclipse.microprofile.rest.client.inject.RestClient;
import org.json.JSONObject;
+import org.eclipse.microprofile.rest.client.inject.RestClient;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import javax.validation.Valid;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
-import java.net.URISyntaxException;
import java.sql.Connection;
-import java.sql.ResultSet;
+import java.sql.PreparedStatement;
import java.sql.SQLException;
-import java.sql.Statement;
@Path("/")
public class CreateScanResource {
+ private static final Logger logger = LoggerFactory.getLogger(CreateScanResource.class);
+
@RestClient
CreateScanService createScanService;
@POST
@Consumes({ "application/json" })
//in theory should take List to clean it up
- public ScanObj invokeScanAnalyze(@Valid String scanInvocation) throws URISyntaxException {
+ public ScanObj invokeScanAnalyze(@Valid String scanInvocation) throws JSONException {
JSONObject jsonData = new JSONObject(scanInvocation);
ScanObj scanObj = ScanObjPayload.constructScanPayload(jsonData);
ConnectDB connectDB = new ConnectDB();
- Connection conn = connectDB.connect();
- Statement stmt = null;
- String sql = "INSERT INTO scans (scanid, productid, eventid, ismanagedservice, componentlist) VALUES ('" +scanObj.scanId+"', '"+scanObj.productId+"', '"+scanObj.eventId+"', '"+scanObj.isManagedService+"', '"+scanObj.componentList+"')";
- try{
- stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- conn.close();
- } catch (SQLException e){
- System.out.println(e);
- }
+ try(Connection conn = connectDB.connect();
+ PreparedStatement pstmt = conn.prepareStatement(ScanObj.SQL)) {
+ pstmt.setString(1, scanObj.getScanId());
+ pstmt.setString(2, scanObj.getProductId());
+ pstmt.setString(3, scanObj.getEventId());
+ pstmt.setString(4, scanObj.getIsManagedService());
+ pstmt.setString(5, scanObj.getComponentList());
+ pstmt.executeUpdate();
+ } catch (SQLException e) {
+ logger.error(e.getMessage());
+ }
return scanObj;
}
}
diff --git a/src/main/java/rest/CreateStartScan.java b/src/main/java/rest/CreateStartScan.java
index 4bade54..5461722 100644
--- a/src/main/java/rest/CreateStartScan.java
+++ b/src/main/java/rest/CreateStartScan.java
@@ -5,54 +5,94 @@ import dto.ScanObj;
import io.quarkus.security.Authenticated;
import org.eclipse.microprofile.rest.client.inject.RestClient;
-import javax.ws.rs.PUT;
+import org.eclipse.microprofile.rest.client.inject.RestClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import javax.ws.rs.Path;
+import javax.ws.rs.PUT;
import javax.ws.rs.PathParam;
-import java.net.URISyntaxException;
import java.sql.Connection;
+import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
-import java.sql.Statement;
@Authenticated
@Path("/startScan")
public class CreateStartScan {
+ private static final Logger logger = LoggerFactory.getLogger(CreateStartScan.class);
+
@RestClient
CreateScanService createScanService;
@PUT
@Path("/{scanId}")
- public ScanObj invokeScanAnalyze(@PathParam("scanId") String scanId) throws URISyntaxException {
+ public ScanObj invokeScanAnalyze(@PathParam("scanId") String scanId) {
+ ScanObj finalScan = null;
ConnectDB connectDB = new ConnectDB();
- Connection conn = connectDB.connect();
- //this is ugly needs to berewritten
- Statement stmt = null;
+ try (Connection conn = connectDB.connect()) {
+ finalScan = selectDataForArchiving(conn, scanId);
+ if (finalScan != null) {
+ archiveSelectedScans(conn, finalScan);
+ postArchivingCleanup(conn, scanId);
+ } else {
+ logger.warn("No data match found for scan ID=" + scanId);
+ }
+ } catch (SQLException e) {
+ logger.error(e.getMessage());
+ }
+ return finalScan;
+ }
+
+ private ScanObj selectDataForArchiving(Connection conn, String scanId) {
ScanObj finalScan = null;
+ String sql = "SELECT * FROM scans WHERE scan_id=?";
+ try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
+ pstmt.setString(1, scanId);
+ ResultSet rs = pstmt.executeQuery();
- String sql = "SELECT * FROM scans WHERE scanid=" + scanId;
- //need to add figure out an archieve system and wether its nessacery (archieve value??)
- try{
- stmt = conn.createStatement();
- //terrible solution has to be a better way of doing this
- ResultSet rs = stmt.executeQuery(sql);
-
- //fix for individual results (not resultset)
- //TODO: need to add unique keys to DBs
- finalScan = new ScanObj(rs.getString("scanid"),rs.getString("productid"),rs.getString("eventid"),rs.getString("ismanagedservice"),rs.getString("componentlist"));
- String copySql = "INSERT INTO archive (scanid, productid, eventid, ismanagedservice, componentlist) VALUES ('" +finalScan.scanId+"', '"+finalScan.productId+"', '"+finalScan.eventId+"', '"+finalScan.isManagedService+"', '"+finalScan.componentList+"')";
- stmt.executeUpdate(copySql);
-
- //TODO add proper checks
- String deleteSql = "DELETE FROM scans WHERE scanid=" + scanId;
- stmt.executeUpdate(deleteSql);
-
- //send task to the actual interface here using the resultset returned (should multiple scanids be allowed):
- //once the task is complete AND we have confirmation that the scan is done run the following sql
- conn.close();
- } catch (SQLException e){
- System.out.println(e);
- }
+            // TODO: need to add unique keys to DBs
+            // rs starts positioned before the first row; rs.next() is required before reading
+            if (rs.next()) { finalScan = new ScanObj(
+                rs.getString("scan_id"),
+                rs.getString("offering_id"),
+                rs.getString("event_id"),
+                rs.getString("is_managed_service"),
+                rs.getString("component_list")); }
+ } catch (SQLException e) {
+ logger.error(e.getMessage());
+ }
return finalScan;
}
+
+ private void archiveSelectedScans(Connection conn, ScanObj finalScan) {
+ String sql = "INSERT INTO archive " +
+ "(scan_id, offering_id, event_id, is_managed_service, component_list) " +
+                "VALUES (?, ?, ?, ?, ?)";
+ try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
+ pstmt.setString(1, finalScan.getScanId());
+ pstmt.setString(2, finalScan.getProductId());
+ pstmt.setString(3, finalScan.getEventId());
+ pstmt.setString(4, finalScan.getIsManagedService());
+ pstmt.setString(5, finalScan.getComponentList());
+ pstmt.executeUpdate();
+ } catch (SQLException e) {
+ logger.error(e.getMessage());
+ }
+ }
+
+ private void postArchivingCleanup(Connection conn, String scanId) {
+ // TODO add proper checks
+ // send task to the actual interface here using the resultset returned (should multiple scanids be allowed):
+ // once the task is complete AND we have confirmation that the scan is done run the following sql
+
+ String sql = "DELETE FROM scans WHERE scan_id=?";
+ try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
+ pstmt.setString(1, scanId);
+ pstmt.executeUpdate();
+ } catch (SQLException e) {
+ logger.error(e.getMessage());
+ }
+ }
}
diff --git a/src/main/java/rest/RemoveScan.java b/src/main/java/rest/RemoveScan.java
index e8829ef..783873c 100644
--- a/src/main/java/rest/RemoveScan.java
+++ b/src/main/java/rest/RemoveScan.java
@@ -1,48 +1,43 @@
package rest;
import dto.ConnectDB;
-import dto.ScanObj;
+
import org.eclipse.microprofile.rest.client.inject.RestClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import javax.ws.rs.DELETE;
import javax.ws.rs.Path;
+import javax.ws.rs.DELETE;
import javax.ws.rs.PathParam;
-import java.net.URISyntaxException;
import java.sql.Connection;
+import java.sql.PreparedStatement;
import java.sql.SQLException;
-import java.sql.Statement;
@Path("/deleteScan")
public class RemoveScan {
+ private static final Logger logger = LoggerFactory.getLogger(RemoveScan.class);
+
// @Inject
@RestClient
CreateScanService createScanService;
- // ScanObjPayload scanObjPayload;
@DELETE
@Path("/{scanId}")
- public boolean invokeScanAnalyze(@PathParam("scanId") String scanId) throws URISyntaxException {
+ public boolean invokeScanAnalyze(@PathParam("scanId") String scanId) {
+ boolean rc = false;
+ //send task to the actual interface here using the resultset returned (should multiple scanids be allowed):
+ //once the task is complete AND we have confirmation that the scan is done run the following sql
+ String qry = "DELETE FROM scans WHERE scan_id=?";
ConnectDB connectDB = new ConnectDB();
- Connection conn = connectDB.connect();
- //this is ugly needs to berewritten
- Statement stmt = null;
- ScanObj finalScan = null;
- //fix this
- Boolean success = false;
- String sql = "DELETE FROM scans WHERE scanid=" + scanId;
- //need to add figure out an archieve system and wether its nessacery (archieve value??)
- try{
- stmt = conn.createStatement();
- //TODO add proper checks
- stmt.executeUpdate(sql);
- //send task to the actual interface here using the resultset returned (should multiple scanids be allowed):
- //once the task is complete AND we have confirmation that the scan is done run the following sql
- conn.close();
- } catch (SQLException e){
- System.out.println(e);
- }
- success = true;
- return success;
+ try(Connection conn = connectDB.connect();
+ PreparedStatement pstmt = conn.prepareStatement(qry)) {
+ pstmt.setString(1, scanId);
+ pstmt.executeUpdate();
+ rc = true;
+ } catch (SQLException e) {
+ logger.error(e.getMessage());
+ }
+ return rc;
}
}
diff --git a/src/test/java/dto/TestPayload.java b/src/test/java/dto/TestPayload.java
new file mode 100644
index 0000000..51980ac
--- /dev/null
+++ b/src/test/java/dto/TestPayload.java
@@ -0,0 +1,107 @@
+package dto;
+
+import org.json.JSONObject;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import static org.junit.jupiter.api.Assertions.*;
+
+class TestPayload {
+
+ private static final Logger logger = LoggerFactory.getLogger(TestPayload.class);
+
+ @Test
+ void TestBrew() {
+ JSONObject jsonObject = new JSONObject();
+ jsonObject.put("build_system_type", "brew");
+ jsonObject.put("brew_id", "1");
+ jsonObject.put("brew_nvr", "1.1.0");
+ jsonObject.put("pnc_id", "153");
+ jsonObject.put("artifact_type", "arti");
+ jsonObject.put("file_name", "myfile");
+ jsonObject.put("built_from_source", true);
+
+ BrewObj brewObj1 = BrewObjPayload.constructScanPayload(jsonObject);
+ BrewObj brewObj2 = new BrewObj(
+ jsonObject.getString("build_system_type"),
+ jsonObject.getString("brew_id"),
+ jsonObject.getString("brew_nvr"),
+ jsonObject.getString("pnc_id"),
+ jsonObject.getString("artifact_type"),
+ jsonObject.getString("file_name"),
+ jsonObject.getBoolean("built_from_source"));
+
+ logger.info("BrewObj1: " + brewObj1.toString());
+ logger.info("BrewObj2: " + brewObj2.toString());
+ assertEquals(brewObj1.getBuildSystemType(), brewObj2.getBuildSystemType());
+ assertEquals(brewObj1.getBrewId(), brewObj2.getBrewId());
+ assertEquals(brewObj1.getBrewNvr(), brewObj2.getBrewNvr());
+ assertEquals(brewObj1.getPncId(), brewObj2.getPncId());
+ assertEquals(brewObj1.getArtifactType(), brewObj2.getArtifactType());
+ assertEquals(brewObj1.getFileName(), brewObj2.getFileName());
+        assertEquals(brewObj1.getBuiltFromSource(), brewObj2.getBuiltFromSource());
+ }
+
+ @Test
+ void TestGit() {
+ JSONObject jsonObject = new JSONObject();
+ jsonObject.put("build_system_type", "git");
+ jsonObject.put("repository", "repo");
+ jsonObject.put("reference", "ref");
+ jsonObject.put("commit_id", "c6385a754421a57cd0a26ccba187cd687c8d1258");
+
+ GitObj gitObj1 = GitObjPayload.constructScanPayload(jsonObject);
+ GitObj gitObj2 = new GitObj(
+ jsonObject.getString("build_system_type"),
+ jsonObject.getString("repository"),
+ jsonObject.getString("reference"),
+ jsonObject.getString("commit_id"));
+ logger.info("GitObj1: " + gitObj1.toString());
+ logger.info("GitObj2: " + gitObj2.toString());
+ assertEquals(gitObj1.getBuildSystemType(), gitObj2.getBuildSystemType());
+ assertEquals(gitObj1.getRepository(), gitObj2.getRepository());
+ assertEquals(gitObj1.getReference(), gitObj2.getReference());
+ assertEquals(gitObj1.getCommitId(), gitObj2.getCommitId());
+ }
+
+ @Test
+ void TestPnc() {
+ JSONObject jsonObject = new JSONObject();
+ jsonObject.put("build_system_type", "pnc");
+ jsonObject.put("build_id", "153");
+
+ PncObj pncObj1 = PncObjPayload.constructScanPayload(jsonObject);
+ PncObj pncObj2 = new PncObj(
+ jsonObject.getString("build_system_type"),
+ jsonObject.getString("build_id"));
+ logger.info("PncObj1: " + pncObj1.toString());
+ logger.info("PncObj2: " + pncObj2.toString());
+ assertEquals(pncObj1.getBuildSystemType(), pncObj2.getBuildSystemType());
+ assertEquals(pncObj1.getBuildId(), pncObj2.getBuildId());
+ }
+
+ @Test
+ void TestScan() {
+ JSONObject jsonObject = new JSONObject();
+ jsonObject.put("scan_id", "ABC");
+ jsonObject.put("offering_id", "product#");
+ jsonObject.put("event_id", "event#");
+ jsonObject.put("is_managed_service", "TRUE");
+ jsonObject.put("component_list", "components");
+
+ ScanObj scanObj1 = ScanObjPayload.constructScanPayload(jsonObject);
+ ScanObj scanObj2 = new ScanObj(
+ jsonObject.getString("scan_id"),
+ jsonObject.getString("offering_id"),
+ jsonObject.getString("event_id"),
+ jsonObject.getString("is_managed_service"),
+ jsonObject.getString("component_list"));
+ logger.info("ScanObj1: " + scanObj1.toString());
+ logger.info("ScanObj2: " + scanObj2.toString());
+ assertEquals(scanObj1.getScanId(), scanObj2.getScanId());
+ assertEquals(scanObj1.getProductId(), scanObj2.getProductId());
+ assertEquals(scanObj1.getEventId(), scanObj2.getEventId());
+ assertEquals(scanObj1.getIsManagedService(), scanObj2.getIsManagedService());
+ assertEquals(scanObj1.getComponentList(), scanObj2.getComponentList());
+ }
+}