21 changed files with 656 additions and 224 deletions
@ -0,0 +1,12 @@
|
||||
.dcignore |
||||
.idea |
||||
*.iml |
||||
|
||||
dev/ |
||||
|
||||
# Maven |
||||
target/ |
||||
pom.xml.tag |
||||
pom.xml.releaseBackup |
||||
pom.xml.versionsBackup |
||||
release.properties |
||||
@ -0,0 +1,30 @@
|
||||
from bs4 import BeautifulSoup |
||||
import requests |
||||
import re |
||||
import csv |
||||
|
||||
results = {} |
||||
|
||||
URL = "https://product-security.pages.redhat.com/offering-registry/" |
||||
r = requests.get(URL) |
||||
|
||||
soup = BeautifulSoup(r.text, 'html.parser') |
||||
table = soup.find("table") |
||||
rows = table.findAll("tr") |
||||
|
||||
for row in rows: |
||||
for elem in row.contents: |
||||
if row.contents[1].text == 'Offering': |
||||
break |
||||
else: |
||||
# We extract the short name of the URL |
||||
re_search = re.search('/offering-registry/offerings/(.*)/', row.contents[1].contents[0].attrs["href"]) |
||||
results[re_search.group(1)] = row.contents[1].contents[0].text |
||||
break |
||||
|
||||
print(results) |
||||
|
||||
with open('offerings.csv', 'w') as csv_file: |
||||
writer = csv.writer(csv_file) |
||||
for key, value in results.items(): |
||||
writer.writerow([key, value]) |
||||
@ -0,0 +1,126 @@
|
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-automation-platform','Ansible Automation Platform (AAP)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('advisor','Insights Advisor'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-on-aws','Ansible on AWS'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-on-azure','Ansible on Azure'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-on-gcp','Ansible on GCP'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-wisdom-service','Ansible Wisdom Service'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('cert-manager','cert-manager Operator for Red Hat OpenShift'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('compliance','Insights Compliance'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('connected-customer-experience','Connected Customer Experience (CCX)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('cost-management','Cost Management'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('custom-metric-autoscaler','OpenShift Custom Metrics Autoscaler'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('developer-sandbox-for-red-hat-openshift','Developer Sandbox for Red Hat OpenShift'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('dotnet','.NET'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('drift','Insights Drift'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('eclipse-vertx','Red Hat build of Eclipse Vert.x'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('edge-management','Edge Management'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('eventing','Insights Eventing'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('fastdatapath','RHEL Fast Datapath'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('host-management-services','Host Management Services'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('hosted-control-planes','Hosted Control Planes (Hypershift)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('hybrid-application-console','Hybrid Application Console (HAC)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('insights-essential','Insights Essentials'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('kernel-module-management','Kernel Module Management'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('logging-subsystem-for-red-hat-openshift','Logging Subsystem for Red Hat OpenShift'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('lvms-operator','LVMS Operator'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('malware-detection','Insights Malware Detection'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('mgmt-platform','Management Platform'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-applications','Migration Toolkit for Applications (MTA)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-containers','Migration Toolkit for Containers (MTC)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-runtimes','Migration Toolkit for Runtimes (MTR)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-virtualization','Migration Toolkit for Virtualization (MTV)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('network-observability-operator','Network Observability Operator'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('node-healthcheck-operator','Node HealthCheck Operator'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('node-maintenance-operator','Node Maintenance Operator'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('nvidia-gpu-add-on','NVIDIA GPU Add-On'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('oadp','OpenShift API for Data Protection'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-container-platform','Openshift Container Platform (OCP)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-container-storage','OpenShift Container Storage (OCS)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-data-foundation-managed-service','Red Hat OpenShift Data Foundation Managed Service'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-dedicated','OpenShift Dedicated (OSD/ROSA)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-developer-tools-and-services-helm','OpenShift Developer Tools and Services (Helm)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-developer-tools-and-services-jenkins','OpenShift Developer Tools and Services (Jenkins)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-distributed-tracing','OpenShift Distributed Tracing'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-on-azure','Openshift on Azure (ARO)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-run-once-duration-override-operator','OpenShift Run Once Duration Override Operator'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-sandboxed-containers','Openshift Sandboxed Containers'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-secondary-scheduler-operator','OpenShift Secondary Scheduler Operator'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-servicemesh','OpenShift Service Mesh'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-virtualization','OpenShift Virtualization (CNV)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-web-terminal-operator','OpenShift Web Terminal Operator'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-winc','Windows Container Support for OpenShift'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('patch','Insights Patch'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('product-discovery','Product Discovery'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-3scale-api-management-platform','Red Hat 3scale API Management Platform'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-advanced-cluster-management','Red Hat Advanced Cluster Management (RHACM)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-broker','Red Hat AMQ Broker'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-clients','Red Hat AMQ Clients'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-interconnect','Red Hat AMQ Interconnect'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-online','Red Hat AMQ Online'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-streams','Red Hat AMQ Streams'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-build-apicurio-registry','Red Hat build of Apicurio Registry (formerly known as Integration Service Registry)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-build-quarkus','Red Hat Build of Quarkus'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-camel-extensions-quarkus','Red Hat Camel Extensions for Quarkus'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-camel-k','Red Hat Camel K'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-camel-spring-boot','Red Hat Camel for Spring Boot'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-ceph-storage','Red Hat Ceph Storage'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-certificate-system','Red Hat Certificate System (RHCS)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-certification-program','Red Hat Certification Program (rhcertification)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-code-quarkus','Red Hat Code Quarkus'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-core-os','Red Hat CoreOS'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-data-grid','Red Hat Data Grid'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-debezium','Red Hat Debezium'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-decision-manager','Red Hat Decision Manager'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-developer-hub','Red Hat Developer Hub'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-developer-toolset','Red Hat Developer Toolset (DTS)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-devtools-compilers','Red Hat Developer Tools (DevTools Compilers)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-directory-server','Red Hat Directory Server (RHDS)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-10','Red Hat Enterprise Linux (RHEL) 10'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-6','Red Hat Enterprise Linux (RHEL) 6'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-7','Red Hat Enterprise Linux (RHEL) 7'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-8','Red Hat Enterprise Linux (RHEL) 8'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-9','Red Hat Enterprise Linux (RHEL) 9'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-fuse','Red Hat Fuse'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-gluster-storage','Red Hat Gluster Storage'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-in-vehicle-os','Red Hat In-Vehicle Operating System (RHIVOS)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-jboss-core-services','Red Hat JBoss Core Services'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-jboss-eap','Red Hat JBoss Enterprise Application Platform (EAP)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-jboss-web-server','Red Hat JBoss Web Server'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-observability-service','Red Hat Observability Service'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-open-database-access','Red Hat OpenShift Database Access'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-open-shift-data-science','Red Hat OpenShift Data Science (RHODS)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openjdk','Red Hat OpenJDK'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-api-management','Red Hat OpenShift API Management'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-builds-v2','Red Hat OpenShift Builds V2'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-connectors','Red Hat OpenShift Connectors (RHOC)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-control-plane-service','Red Hat OpenShift Control Plane Service'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-data-foundation','Red Hat OpenShift Data Foundation'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-dev-spaces','Red Hat OpenShift Dev Spaces'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-gitops','Red Hat OpenShift GitOps'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-local','Red Hat OpenShift Local'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-pipelines','Red Hat OpenShift Pipelines'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-serverless','Red Hat OpenShift Serverless'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-service-registry','Red Hat OpenShift Service Registry'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-streams-apache-kafka','Red Hat OpenShift Streams for Apache Kafka (RHOSAK)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openstack-platform','Red Hat OpenStack Platform (RHOSP)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-optaplanner','Red Hat Optaplanner'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-plug-ins-for-backstage','Red Hat Plug-ins for Backstage'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-process-automation-manager','Red Hat Process Automation Manager'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-quarkus-registry','Red Hat Quarkus Registry'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-quay','Red Hat Quay'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-satellite','Red Hat Satellite'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-service-interconnect','Red Hat Service Interconnect (formerly known as Application Interconnect)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-single-sign-on','Red Hat Single Sign-On (RHSSO)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-software-collections','Red Hat Software Collections'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-support-for-spring-boot','Red Hat support for Spring Boot'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-trusted-application-pipeline','Red Hat Trusted Application Pipeline (RHTAP)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-update-infrastructure','Red Hat Update Infrastructure (RHUI)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-virtualization','Red Hat Virtualization'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('resource-optimization','Insights Resource Optimization (ROS)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('rh-vulnerability-for-ocp','Insights Vulnerability for OCP'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('rhacs','Red Hat Advanced Cluster Security for Kubernetes (RHACS)'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('self-node-remediation','Self Node Remediation'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('subscription-central','Subscription Central'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('subscription-watch','Subscription Watch'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('telco-sw-components','Telco SW Components'); |
||||
INSERT INTO osh.offerings(offering_id,description) VALUES ('vulnerability','Vulnerability'); |
||||
@ -0,0 +1,81 @@
|
||||
CREATE SCHEMA osh; |
||||
|
||||
GRANT USAGE ON SCHEMA osh TO postgres; |
||||
|
||||
-- Offering registry: maps the offering short name (URL slug) to its display name.
CREATE TABLE IF NOT EXISTS osh.offerings(
    offering_id VARCHAR(100),
    description VARCHAR(200),
    -- BUG FIX: the column is offering_id; the camelCase "offeringId" made this
    -- CREATE TABLE fail (no such column).
    PRIMARY KEY (offering_id)
);
||||
|
||||
CREATE TABLE IF NOT EXISTS osh.results( |
||||
results_id SERIAL, |
||||
datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, |
||||
state BOOLEAN, |
||||
logs bytea, |
||||
task_reference VARCHAR(50), |
||||
PRIMARY KEY (results_id) |
||||
); |
||||
|
||||
|
||||
CREATE TABLE IF NOT EXISTS osh.scans( |
||||
scan_id SERIAL, |
||||
offering_id VARCHAR(100), |
||||
event_id VARCHAR(100) NOT NULL, |
||||
is_managed_service BOOLEAN NOT NULL, |
||||
component_list VARCHAR(100), |
||||
datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, |
||||
owner VARCHAR(50) NOT NULL, |
||||
results SERIAL, |
||||
status VARCHAR (50) CONSTRAINT valid_status CHECK(status in ('PENDING', 'DELETED', 'COMPLETED', 'IN PROGRESS')), |
||||
last_updated TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, |
||||
PRIMARY KEY(scan_id), |
||||
FOREIGN KEY (offering_id) REFERENCES osh.offerings(offering_id), |
||||
FOREIGN KEY (results) REFERENCES osh.results(results_id) |
||||
); |
||||
|
||||
CREATE TABLE IF NOT EXISTS osh.archive( |
||||
scan_id SERIAL, |
||||
offering_id VARCHAR(100), |
||||
event_id VARCHAR(100) NOT NULL, |
||||
is_managed_service BOOLEAN NOT NULL, |
||||
component_list VARCHAR(100), |
||||
datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, |
||||
owner VARCHAR(50) NOT NULL, |
||||
results SERIAL, |
||||
status VARCHAR (50) CONSTRAINT valid_status CHECK(status in ('PENDING', 'DELETED', 'COMPLETED', 'IN PROGRESS')), |
||||
last_updated TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, |
||||
PRIMARY KEY(scan_id), |
||||
FOREIGN KEY (offering_id) REFERENCES osh.offerings(offering_id), |
||||
FOREIGN KEY (results) REFERENCES osh.results(results_id) |
||||
); |
||||
|
||||
CREATE TABLE IF NOT EXISTS osh.gitscans ( |
||||
id SERIAL, |
||||
build_system_type VARCHAR(80), |
||||
repository VARCHAR(150), |
||||
reference VARCHAR(100), |
||||
commit_id VARCHAR(100), |
||||
-- SHA256 has a length of 256 bits, so 256 bits would represent 64 hex characters |
||||
hashsum VARCHAR(64), |
||||
PRIMARY KEY(id) |
||||
); |
||||
|
||||
CREATE TABLE IF NOT EXISTS osh.pncscans( |
||||
id SERIAL, |
||||
build_system_type VARCHAR(80), |
||||
build_id VARCHAR(100), |
||||
PRIMARY KEY(id) |
||||
); |
||||
|
||||
CREATE TABLE IF NOT EXISTS osh.brewscans( |
||||
id SERIAL, |
||||
build_system_type VARCHAR(80), |
||||
brew_id VARCHAR(100), |
||||
brew_nvr VARCHAR(100), |
||||
pnc_id VARCHAR(100), |
||||
artifact_type VARCHAR(100), |
||||
file_name VARCHAR(100), |
||||
built_from_source BOOLEAN, |
||||
PRIMARY KEY(id) |
||||
); |
||||
@ -1,15 +1,20 @@
|
||||
package dto; |
||||
|
||||
// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload;
|
||||
// import org.jboss.pnc.api.dto.HeartbeatConfig;
|
||||
// import org.jboss.pnc.api.dto.Request;
|
||||
|
||||
import org.json.JSONException; |
||||
import org.json.JSONObject; |
||||
|
||||
import java.net.URISyntaxException; |
||||
|
||||
public class BrewObjPayload { |
||||
public static BrewObj constructScanPayload(JSONObject brewObj) throws URISyntaxException { |
||||
return new BrewObj(brewObj.getString("buildSystemType"),brewObj.getString("brewId"),brewObj.getString("brewNvr"),brewObj.getString("pncId"),brewObj.getString("artifactType"),brewObj.getString("fileName"),brewObj.getString("builtFromSource")); |
||||
|
||||
public static BrewObj constructScanPayload(JSONObject jsonObj) throws JSONException { |
||||
return new BrewObj( |
||||
jsonObj.getString("build_system_type"), |
||||
jsonObj.getString("brew_id"), |
||||
jsonObj.getString("brew_nvr"), |
||||
jsonObj.getString("pnc_id"), |
||||
jsonObj.getString("artifact_type"), |
||||
jsonObj.getString("file_name"), |
||||
jsonObj.getBoolean("built_from_source")); |
||||
} |
||||
} |
||||
|
||||
private BrewObjPayload() {} |
||||
} |
||||
|
||||
@ -1,31 +1,25 @@
|
||||
package dto; |
||||
|
||||
import org.json.JSONException; |
||||
|
||||
import java.sql.Connection; |
||||
import java.sql.DriverManager; |
||||
import java.sql.SQLException; |
||||
|
||||
import static constants.PSGQL.*; |
||||
|
||||
// @TODO Replace hard-coded credentials; make use of our secure db connection practice
|
||||
|
||||
public class ConnectDB{ |
||||
// private final String url = "jdbc:postgresql://localhost:5432/scandb";
|
||||
// private final String user = "postgres";
|
||||
// private final String password = "password";
|
||||
public class ConnectDB { |
||||
|
||||
/** |
||||
* Connect to the PostgreSQL database |
||||
* |
||||
* @return a Connection object |
||||
*/ |
||||
public Connection connect() { |
||||
Connection conn = null; |
||||
public Connection connect() throws JSONException { |
||||
try { |
||||
conn = DriverManager.getConnection(url, user, password); |
||||
System.out.println("Connected to the PostgreSQL server successfully."); |
||||
Connection conn = DriverManager.getConnection(url, user, password); |
||||
System.out.println("Connected to PostgreSQL server"); |
||||
return conn; |
||||
} catch (SQLException e) { |
||||
System.out.println(e.getMessage()); |
||||
} |
||||
|
||||
return conn; |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
@ -1,15 +1,17 @@
|
||||
package dto; |
||||
|
||||
// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload;
|
||||
// import org.jboss.pnc.api.dto.HeartbeatConfig;
|
||||
// import org.jboss.pnc.api.dto.Request;
|
||||
|
||||
import org.json.JSONException; |
||||
import org.json.JSONObject; |
||||
|
||||
import java.net.URISyntaxException; |
||||
|
||||
public class GitObjPayload { |
||||
public static GitObj constructScanPayload(JSONObject gitObj) throws URISyntaxException { |
||||
return new GitObj(gitObj.getString("buildSystemType"),gitObj.getString("repository"),gitObj.getString("reference"),gitObj.getString("commitId")); |
||||
|
||||
public static GitObj constructScanPayload(JSONObject jsonObj) throws JSONException { |
||||
return new GitObj( |
||||
jsonObj.getString("build_system_type"), |
||||
jsonObj.getString("repository"), |
||||
jsonObj.getString("reference"), |
||||
jsonObj.getString("commit_id")); |
||||
} |
||||
} |
||||
|
||||
private GitObjPayload() {} |
||||
} |
||||
|
||||
@ -1,15 +1,15 @@
|
||||
package dto; |
||||
|
||||
// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload;
|
||||
// import org.jboss.pnc.api.dto.HeartbeatConfig;
|
||||
// import org.jboss.pnc.api.dto.Request;
|
||||
|
||||
import org.json.JSONException; |
||||
import org.json.JSONObject; |
||||
|
||||
import java.net.URISyntaxException; |
||||
|
||||
public class PncObjPayload { |
||||
public static PncObj constructScanPayload(JSONObject pncObj) throws URISyntaxException { |
||||
return new PncObj(pncObj.getString("buildSystemType"),pncObj.getString("buildId")); |
||||
|
||||
public static PncObj constructScanPayload(JSONObject jsonObj) throws JSONException { |
||||
return new PncObj( |
||||
jsonObj.getString("build_system_type"), |
||||
jsonObj.getString("build_id")); |
||||
} |
||||
} |
||||
|
||||
private PncObjPayload() {} |
||||
} |
||||
|
||||
@ -1,15 +1,17 @@
|
||||
package dto; |
||||
|
||||
// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload;
|
||||
// import org.jboss.pnc.api.dto.HeartbeatConfig;
|
||||
// import org.jboss.pnc.api.dto.Request;
|
||||
|
||||
import org.json.JSONException; |
||||
import org.json.JSONObject; |
||||
|
||||
import java.net.URISyntaxException; |
||||
|
||||
public class ScanObjPayload { |
||||
public static ScanObj constructScanPayload(JSONObject scanObj) throws URISyntaxException { |
||||
return new ScanObj(scanObj.getString("scanId"),scanObj.getString("productId"),scanObj.getString("eventId"),scanObj.getString("isManagedService"),scanObj.getString("componentList")); |
||||
public static ScanObj constructScanPayload(JSONObject jsonObj) throws JSONException { |
||||
return new ScanObj( |
||||
jsonObj.getString("scan_id"), |
||||
jsonObj.getString("offering_id"), |
||||
jsonObj.getString("event_id"), |
||||
jsonObj.getString("is_managed_service"), |
||||
jsonObj.getString("component_list")); |
||||
} |
||||
} |
||||
|
||||
private ScanObjPayload() {} |
||||
} |
||||
|
||||
@ -1,62 +1,59 @@
|
||||
package rest; |
||||
|
||||
import dto.ConnectDB; |
||||
import java.util.Collections; |
||||
import java.util.LinkedHashMap; |
||||
import java.util.Set; |
||||
|
||||
import dto.ScanObj; |
||||
import io.quarkiverse.kerberos.KerberosPrincipal; |
||||
import io.quarkus.security.Authenticated; |
||||
import io.quarkus.security.identity.SecurityIdentity; |
||||
import dto.ConnectDB; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
|
||||
import javax.inject.Inject; |
||||
import javax.ws.rs.GET; |
||||
import javax.ws.rs.Path; |
||||
import javax.ws.rs.PathParam; |
||||
import java.sql.Connection; |
||||
import java.sql.PreparedStatement; |
||||
import java.sql.ResultSet; |
||||
import java.sql.SQLException; |
||||
import java.sql.Statement; |
||||
import java.util.Collections; |
||||
import java.util.LinkedHashMap; |
||||
import java.util.Set; |
||||
|
||||
|
||||
// import org.hibernate.EntityManager;
|
||||
|
||||
|
||||
// @Path("/api/v1/[osh-scan]")
|
||||
@Path("/scanGet") |
||||
@Authenticated |
||||
public class CreateGetResource { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(CreateGetResource.class); |
||||
|
||||
CreateScanService createScanService; |
||||
|
||||
private Set<ScanObj> Scans = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<>())); |
||||
|
||||
|
||||
public CreateGetResource() { |
||||
|
||||
// LDB: @TODO either put some code here or remove this not used public constructor
|
||||
} |
||||
|
||||
@GET |
||||
@Path("/{scanId}") |
||||
public Set<ScanObj> list(@PathParam("scanId") String scanId) { |
||||
//use to return specific scanIds just use usual fetch from sets, will be querying hte db directly here
|
||||
try { |
||||
ConnectDB connectDB = new ConnectDB(); |
||||
Connection conn = connectDB.connect(); |
||||
Statement stmt = null; |
||||
String sql = "SELECT * FROM scans WHERE scanid=" +scanId; |
||||
stmt = conn.createStatement(); |
||||
ResultSet rs = stmt.executeQuery(sql); |
||||
|
||||
ConnectDB connectDB = new ConnectDB(); |
||||
String sql = "SELECT * FROM scans WHERE scan_id=?"; |
||||
try(Connection conn = connectDB.connect(); |
||||
PreparedStatement pstmt = conn.prepareStatement(sql)) { |
||||
pstmt.setString(1, scanId); |
||||
ResultSet rs = pstmt.executeQuery(); |
||||
while (rs.next()) { |
||||
//very ugly solution needs some change to where we put the query
|
||||
Scans.add(new ScanObj(rs.getString("scanid"),rs.getString("productid"),rs.getString("eventid"),rs.getString("ismanagedservice"),rs.getString("componentlist"))); |
||||
conn.close(); |
||||
Scans.add(new ScanObj( |
||||
rs.getString("scan_id"), |
||||
rs.getString("offering_id"), |
||||
rs.getString("event_id"), |
||||
rs.getString("is_managed_service"), |
||||
rs.getString("component_list"))); |
||||
} |
||||
} catch (SQLException e){ |
||||
System.out.println(e); |
||||
} catch (SQLException e) { |
||||
logger.error(e.getMessage()); |
||||
} |
||||
return Scans; |
||||
} |
||||
} |
||||
} |
||||
|
||||
@ -1,44 +1,52 @@
|
||||
package rest; |
||||
|
||||
import dto.ConnectDB; |
||||
import dto.ScanObjPayload; |
||||
import dto.ScanObj; |
||||
import dto.ScanObjPayload; |
||||
import org.eclipse.microprofile.rest.client.inject.RestClient; |
||||
import org.json.JSONObject; |
||||
|
||||
import org.eclipse.microprofile.rest.client.inject.RestClient; |
||||
import org.json.JSONException; |
||||
import org.json.JSONObject; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
|
||||
import javax.validation.Valid; |
||||
import javax.ws.rs.Consumes; |
||||
import javax.ws.rs.POST; |
||||
import javax.ws.rs.Path; |
||||
import java.net.URISyntaxException; |
||||
import java.sql.Connection; |
||||
import java.sql.ResultSet; |
||||
import java.sql.PreparedStatement; |
||||
import java.sql.SQLException; |
||||
import java.sql.Statement; |
||||
|
||||
@Path("/") |
||||
public class CreateScanResource { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(CreateScanResource.class); |
||||
|
||||
@RestClient |
||||
CreateScanService createScanService; |
||||
|
||||
@POST |
||||
@Consumes({ "application/json" }) |
||||
//in theory should take List<String> to clean it up
|
||||
public ScanObj invokeScanAnalyze(@Valid String scanInvocation) throws URISyntaxException { |
||||
public ScanObj invokeScanAnalyze(@Valid String scanInvocation) throws JSONException { |
||||
JSONObject jsonData = new JSONObject(scanInvocation); |
||||
ScanObj scanObj = ScanObjPayload.constructScanPayload(jsonData); |
||||
ConnectDB connectDB = new ConnectDB(); |
||||
Connection conn = connectDB.connect(); |
||||
Statement stmt = null; |
||||
String sql = "INSERT INTO scans (scanid, productid, eventid, ismanagedservice, componentlist) VALUES ('" +scanObj.scanId+"', '"+scanObj.productId+"', '"+scanObj.eventId+"', '"+scanObj.isManagedService+"', '"+scanObj.componentList+"')"; |
||||
try{ |
||||
stmt = conn.createStatement(); |
||||
ResultSet rs = stmt.executeQuery(sql); |
||||
conn.close(); |
||||
} catch (SQLException e){ |
||||
System.out.println(e); |
||||
} |
||||
try(Connection conn = connectDB.connect(); |
||||
PreparedStatement pstmt = conn.prepareStatement(ScanObj.SQL)) { |
||||
pstmt.setString(1, scanObj.getScanId()); |
||||
pstmt.setString(2, scanObj.getProductId()); |
||||
pstmt.setString(3, scanObj.getEventId()); |
||||
pstmt.setString(4, scanObj.getIsManagedService()); |
||||
pstmt.setString(5, scanObj.getComponentList()); |
||||
pstmt.executeUpdate(); |
||||
} catch (SQLException e) { |
||||
logger.error(e.getMessage()); |
||||
} |
||||
return scanObj; |
||||
} |
||||
} |
||||
|
||||
@ -1,48 +1,43 @@
|
||||
package rest;

import dto.ConnectDB;
import dto.ScanObj;

import org.eclipse.microprofile.rest.client.inject.RestClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.ws.rs.DELETE;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * REST resource that deletes a scan record by its id.
 *
 * Uses a parameterized DELETE so the caller-supplied path segment can never be
 * spliced into the SQL text (the removed legacy path concatenated {@code scanId}
 * directly into the statement).
 */
@Path("/deleteScan")
public class RemoveScan {

    private static final Logger logger = LoggerFactory.getLogger(RemoveScan.class);

    @RestClient
    CreateScanService createScanService;

    /**
     * Deletes the scan row whose id matches the path parameter.
     *
     * NOTE(review): this query targets column {@code scan_id}, while the insert
     * path elsewhere in this service writes to a column named {@code scanid} —
     * confirm the actual schema; one of the two spellings is likely wrong.
     *
     * @param scanId id of the scan to remove (bound as a statement parameter)
     * @return true only if the DELETE executed without a SQLException;
     *         false when the database operation failed (the error is logged)
     */
    @DELETE
    @Path("/{scanId}")
    public boolean invokeScanAnalyze(@PathParam("scanId") String scanId) {
        boolean rc = false;
        String qry = "DELETE FROM scans WHERE scan_id=?";
        ConnectDB connectDB = new ConnectDB();

        // try-with-resources closes the connection and statement on every path;
        // rc flips to true only after executeUpdate succeeds, so a SQLException
        // can no longer be reported as success.
        try (Connection conn = connectDB.connect();
             PreparedStatement pstmt = conn.prepareStatement(qry)) {
            pstmt.setString(1, scanId);
            pstmt.executeUpdate();
            rc = true;
        } catch (SQLException e) {
            logger.error(e.getMessage());
        }
        return rc;
    }
}
||||
|
||||
@ -0,0 +1,107 @@
|
||||
package dto; |
||||
|
||||
import org.json.JSONObject; |
||||
import org.junit.jupiter.api.Test; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import static org.junit.jupiter.api.Assertions.*; |
||||
|
||||
class TestPayload { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TestPayload.class); |
||||
|
||||
@Test |
||||
void TestBrew() { |
||||
JSONObject jsonObject = new JSONObject(); |
||||
jsonObject.put("build_system_type", "brew"); |
||||
jsonObject.put("brew_id", "1"); |
||||
jsonObject.put("brew_nvr", "1.1.0"); |
||||
jsonObject.put("pnc_id", "153"); |
||||
jsonObject.put("artifact_type", "arti"); |
||||
jsonObject.put("file_name", "myfile"); |
||||
jsonObject.put("built_from_source", true); |
||||
|
||||
BrewObj brewObj1 = BrewObjPayload.constructScanPayload(jsonObject); |
||||
BrewObj brewObj2 = new BrewObj( |
||||
jsonObject.getString("build_system_type"), |
||||
jsonObject.getString("brew_id"), |
||||
jsonObject.getString("brew_nvr"), |
||||
jsonObject.getString("pnc_id"), |
||||
jsonObject.getString("artifact_type"), |
||||
jsonObject.getString("file_name"), |
||||
jsonObject.getBoolean("built_from_source")); |
||||
|
||||
logger.info("BrewObj1: " + brewObj1.toString()); |
||||
logger.info("BrewObj2: " + brewObj2.toString()); |
||||
assertEquals(brewObj1.getBuildSystemType(), brewObj2.getBuildSystemType()); |
||||
assertEquals(brewObj1.getBrewId(), brewObj2.getBrewId()); |
||||
assertEquals(brewObj1.getBrewNvr(), brewObj2.getBrewNvr()); |
||||
assertEquals(brewObj1.getPncId(), brewObj2.getPncId()); |
||||
assertEquals(brewObj1.getArtifactType(), brewObj2.getArtifactType()); |
||||
assertEquals(brewObj1.getFileName(), brewObj2.getFileName()); |
||||
assert(brewObj1.getBuiltFromSource() == brewObj2.getBuiltFromSource()); |
||||
} |
||||
|
||||
@Test |
||||
void TestGit() { |
||||
JSONObject jsonObject = new JSONObject(); |
||||
jsonObject.put("build_system_type", "git"); |
||||
jsonObject.put("repository", "repo"); |
||||
jsonObject.put("reference", "ref"); |
||||
jsonObject.put("commit_id", "c6385a754421a57cd0a26ccba187cd687c8d1258"); |
||||
|
||||
GitObj gitObj1 = GitObjPayload.constructScanPayload(jsonObject); |
||||
GitObj gitObj2 = new GitObj( |
||||
jsonObject.getString("build_system_type"), |
||||
jsonObject.getString("repository"), |
||||
jsonObject.getString("reference"), |
||||
jsonObject.getString("commit_id")); |
||||
logger.info("GitObj1: " + gitObj1.toString()); |
||||
logger.info("GitObj2: " + gitObj2.toString()); |
||||
assertEquals(gitObj1.getBuildSystemType(), gitObj2.getBuildSystemType()); |
||||
assertEquals(gitObj1.getRepository(), gitObj2.getRepository()); |
||||
assertEquals(gitObj1.getReference(), gitObj2.getReference()); |
||||
assertEquals(gitObj1.getCommitId(), gitObj2.getCommitId()); |
||||
} |
||||
|
||||
@Test |
||||
void TestPnc() { |
||||
JSONObject jsonObject = new JSONObject(); |
||||
jsonObject.put("build_system_type", "pnc"); |
||||
jsonObject.put("build_id", "153"); |
||||
|
||||
PncObj pncObj1 = PncObjPayload.constructScanPayload(jsonObject); |
||||
PncObj pncObj2 = new PncObj( |
||||
jsonObject.getString("build_system_type"), |
||||
jsonObject.getString("build_id")); |
||||
logger.info("PncObj1: " + pncObj1.toString()); |
||||
logger.info("PncObj2: " + pncObj2.toString()); |
||||
assertEquals(pncObj1.getBuildSystemType(), pncObj2.getBuildSystemType()); |
||||
assertEquals(pncObj1.getBuildId(), pncObj2.getBuildId()); |
||||
} |
||||
|
||||
@Test |
||||
void TestScan() { |
||||
JSONObject jsonObject = new JSONObject(); |
||||
jsonObject.put("scan_id", "ABC"); |
||||
jsonObject.put("offering_id", "product#"); |
||||
jsonObject.put("event_id", "event#"); |
||||
jsonObject.put("is_managed_service", "TRUE"); |
||||
jsonObject.put("component_list", "components"); |
||||
|
||||
ScanObj scanObj1 = ScanObjPayload.constructScanPayload(jsonObject); |
||||
ScanObj scanObj2 = new ScanObj( |
||||
jsonObject.getString("scan_id"), |
||||
jsonObject.getString("offering_id"), |
||||
jsonObject.getString("event_id"), |
||||
jsonObject.getString("is_managed_service"), |
||||
jsonObject.getString("component_list")); |
||||
logger.info("ScanObj1: " + scanObj1.toString()); |
||||
logger.info("ScanObj2: " + scanObj2.toString()); |
||||
assertEquals(scanObj1.getScanId(), scanObj2.getScanId()); |
||||
assertEquals(scanObj1.getProductId(), scanObj2.getProductId()); |
||||
assertEquals(scanObj1.getEventId(), scanObj2.getEventId()); |
||||
assertEquals(scanObj1.getIsManagedService(), scanObj2.getIsManagedService()); |
||||
assertEquals(scanObj1.getComponentList(), scanObj2.getComponentList()); |
||||
} |
||||
} |
||||
Loading…
Reference in new issue