From 9449cf7fd0ec46a176cf197e96b151e9ff575e38 Mon Sep 17 00:00:00 2001
From: Juan Perez de Algaba
Date: Wed, 26 Apr 2023 14:29:30 +0000
Subject: [PATCH 01/26] Add new directory for db schema

---
 schema/.gitkeep | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 schema/.gitkeep

diff --git a/schema/.gitkeep b/schema/.gitkeep
new file mode 100644
index 0000000..e69de29

From f502e758c7b86b0a25a4bab57855e97e15469ad8 Mon Sep 17 00:00:00 2001
From: Juan Perez de Algaba
Date: Wed, 26 Apr 2023 14:29:57 +0000
Subject: [PATCH 02/26] Uploaded schema file

---
 schema/schema.sql | 63 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 63 insertions(+)
 create mode 100644 schema/schema.sql

diff --git a/schema/schema.sql b/schema/schema.sql
new file mode 100644
index 0000000..942b981
--- /dev/null
+++ b/schema/schema.sql
@@ -0,0 +1,63 @@
+CREATE SCHEMA covscanrest;
+
+GRANT USAGE ON SCHEMA covscanrest TO postgres;
+
+CREATE TABLE IF NOT EXISTS covscanrest.products(
+    productID VARCHAR(100),
+    description VARCHAR(200),
+    repository VARCHAR (150),
+    PRIMARY KEY (productID)
+);
+
+
+CREATE TABLE IF NOT EXISTS covscanrest.scans (
+    scanID SERIAL,
+    productID VARCHAR(100),
+    eventID VARCHAR(100) NOT NULL,
+    isManagedService BOOLEAN NOT NULL,
+    componentList VARCHAR(100),
+    datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL,
+    PRIMARY KEY(scanID),
+    FOREIGN KEY (productID) REFERENCES covscanrest.products(productID)
+);
+
+CREATE TABLE IF NOT EXISTS covscanrest.archive (
+    scanID SERIAL,
+    productID VARCHAR(100),
+    eventID VARCHAR(100) NOT NULL,
+    isManagedService BOOLEAN NOT NULL,
+    componentList VARCHAR(100),
+    datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL,
+    PRIMARY KEY(scanID),
+    FOREIGN KEY (productID) REFERENCES covscanrest.products(productID)
+);
+
+CREATE TABLE IF NOT EXISTS covscanrest.gitscans (
+    id SERIAL,
+    buildSystemType VARCHAR(80),
+    repository VARCHAR(150),
+    reference VARCHAR(100),
+    commitId VARCHAR(100),
+    PRIMARY KEY(id)
+);
+
+CREATE TABLE IF NOT EXISTS covscanrest.pncscans(
+    id SERIAL,
+    buildSystemType VARCHAR(80),
+    buildId VARCHAR(100),
+    reference VARCHAR(100),
+    commitId VARCHAR(100),
+    PRIMARY KEY(id)
+);
+
+CREATE TABLE IF NOT EXISTS covscanrest.brewscans(
+    id SERIAL,
+    buildSystemType VARCHAR(80),
+    brewId VARCHAR(100),
+    brewNVR VARCHAR(100),
+    pncId VARCHAR(100),
+    artifactType VARCHAR(100),
+    fileName VARCHAR(100),
+    builtfromSource BOOLEAN,
+    PRIMARY KEY(id)
+);
\ No newline at end of file

From f3c9338181af47ed050ea85fb53b5f7bdbb50735 Mon Sep 17 00:00:00 2001
From: jperezde
Date: Sun, 30 Apr 2023 19:21:50 +0200
Subject: [PATCH 03/26] Modified schema, create scraper and populate file for offerings file

---
 schema/OffRegScraper.py | 30 ++++
 schema/populate.sql | 126 ++++++++++++++++++++++++++++++++++++++++
 schema/schema.sql | 42 ++++++++++----
 3 files changed, 186 insertions(+), 12 deletions(-)
 create mode 100644 schema/OffRegScraper.py
 create mode 100644 schema/populate.sql

diff --git a/schema/OffRegScraper.py b/schema/OffRegScraper.py
new file mode 100644
index 0000000..1107d30
--- /dev/null
+++ b/schema/OffRegScraper.py
@@ -0,0 +1,30 @@
+from bs4 import BeautifulSoup
+import requests
+import re
+import csv
+
+results = {}
+
+URL = "https://product-security.pages.redhat.com/offering-registry/"
+r = requests.get(URL)
+
+soup = BeautifulSoup(r.text, 'html.parser')
+table = soup.find("table")
+rows = table.findAll("tr")
+
+for row in rows:
+    for elem in row.contents:
+        if 
row.contents[1].text == 'Offering': + break + else: + # We extract the short name of the URL + re_search = re.search('/offering-registry/offerings/(.*)/', row.contents[1].contents[0].attrs["href"]) + results[re_search.group(1)] = row.contents[1].contents[0].text + break + +print(results) + +with open('offerings.csv', 'w') as csv_file: + writer = csv.writer(csv_file) + for key, value in results.items(): + writer.writerow([key, value]) diff --git a/schema/populate.sql b/schema/populate.sql new file mode 100644 index 0000000..949b780 --- /dev/null +++ b/schema/populate.sql @@ -0,0 +1,126 @@ +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('ansible-automation-platform','Ansible Automation Platform (AAP)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('advisor','Insights Advisor'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('ansible-on-aws','Ansible on AWS'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('ansible-on-azure','Ansible on Azure'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('ansible-on-gcp','Ansible on GCP'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('ansible-wisdom-service','Ansible Wisdom Service'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('cert-manager','cert-manager Operator for Red Hat OpenShift'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('compliance','Insights Compliance'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('connected-customer-experience','Connected Customer Experience (CCX)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('cost-management','Cost Management'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('custom-metric-autoscaler','OpenShift Custom Metrics Autoscaler'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('developer-sandbox-for-red-hat-openshift','Developer Sandbox for Red Hat OpenShift'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('dotnet','.NET'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('drift','Insights Drift'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('eclipse-vertx','Red Hat build of Eclipse Vert.x'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('edge-management','Edge Management'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('eventing','Insights Eventing'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('fastdatapath','RHEL Fast Datapath'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('host-management-services','Host Management Services'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('hosted-control-planes','Hosted Control Planes (Hypershift)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('hybrid-application-console','Hybrid Application Console (HAC)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('insights-essential','Insights Essentials'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('kernel-module-management','Kernel Module Management'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('logging-subsystem-for-red-hat-openshift','Logging Subsystem for Red Hat OpenShift'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('lvms-operator','LVMS Operator'); +INSERT INTO 
covscanrest.offerings(offeringId,description) VALUES ('malware-detection','Insights Malware Detection'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('mgmt-platform','Management Platform'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('migration-toolkit-for-applications','Migration Toolkit for Applications (MTA)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('migration-toolkit-for-containers','Migration Toolkit for Containers (MTC)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('migration-toolkit-for-runtimes','Migration Toolkit for Runtimes (MTR)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('migration-toolkit-for-virtualization','Migration Toolkit for Virtualization (MTV)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('network-observability-operator','Network Observability Operator'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('node-healthcheck-operator','Node HealthCheck Operator'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('node-maintenance-operator','Node Maintenance Operator'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('nvidia-gpu-add-on','NVIDIA GPU Add-On'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('oadp','OpenShift API for Data Protection'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-container-platform','Openshift Container Platform (OCP)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-container-storage','OpenShift Container Storage (OCS)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-data-foundation-managed-service','Red Hat OpenShift Data Foundation Managed Service'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-dedicated','OpenShift Dedicated (OSD/ROSA)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-developer-tools-and-services-helm','OpenShift Developer Tools and Services (Helm)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-developer-tools-and-services-jenkins','OpenShift Developer Tools and Services (Jenkins)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-distributed-tracing','OpenShift Distributed Tracing'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-on-azure','Openshift on Azure (ARO)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-run-once-duration-override-operator','OpenShift Run Once Duration Override Operator'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-sandboxed-containers','Openshift Sandboxed Containers'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-secondary-scheduler-operator','OpenShift Secondary Scheduler Operator'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-servicemesh','OpenShift Service Mesh'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-virtualization','OpenShift Virtualization (CNV)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-web-terminal-operator','OpenShift Web Terminal Operator'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-winc','Windows Container Support for OpenShift'); +INSERT INTO 
covscanrest.offerings(offeringId,description) VALUES ('patch','Insights Patch'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('product-discovery','Product Discovery'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-3scale-api-management-platform','Red Hat 3scale API Management Platform'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-advanced-cluster-management','Red Hat Advanced Cluster Management (RHACM)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-amq-broker','Red Hat AMQ Broker'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-amq-clients','Red Hat AMQ Clients'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-amq-interconnect','Red Hat AMQ Interconnect'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-amq-online','Red Hat AMQ Online'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-amq-streams','Red Hat AMQ Streams'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-build-apicurio-registry','Red Hat build of Apicurio Registry (formerly known as Integration Service Registry)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-build-quarkus','Red Hat Build of Quarkus'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-camel-extensions-quarkus','Red Hat Camel Extensions for Quarkus'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-camel-k','Red Hat Camel K'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-camel-spring-boot','Red Hat Camel for Spring Boot'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-ceph-storage','Red Hat Ceph Storage'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-certificate-system','Red Hat Certificate System (RHCS)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-certification-program','Red Hat Certification Program (rhcertification)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-code-quarkus','Red Hat Code Quarkus'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-core-os','Red Hat CoreOS'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-data-grid','Red Hat Data Grid'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-debezium','Red Hat Debezium'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-decision-manager','Red Hat Decision Manager'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-developer-hub','Red Hat Developer Hub'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-developer-toolset','Red Hat Developer Toolset (DTS)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-devtools-compilers','Red Hat Developer Tools (DevTools Compilers)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-directory-server','Red Hat Directory Server (RHDS)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-10','Red Hat Enterprise Linux (RHEL) 10'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-6','Red Hat Enterprise Linux (RHEL) 6'); +INSERT INTO 
covscanrest.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-7','Red Hat Enterprise Linux (RHEL) 7'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-8','Red Hat Enterprise Linux (RHEL) 8'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-9','Red Hat Enterprise Linux (RHEL) 9'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-fuse','Red Hat Fuse'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-gluster-storage','Red Hat Gluster Storage'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-in-vehicle-os','Red Hat In-Vehicle Operating System (RHIVOS)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-jboss-core-services','Red Hat JBoss Core Services'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-jboss-eap','Red Hat JBoss Enterprise Application Platform (EAP)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-jboss-web-server','Red Hat JBoss Web Server'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-observability-service','Red Hat Observability Service'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-open-database-access','Red Hat OpenShift Database Access'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-open-shift-data-science','Red Hat OpenShift Data Science (RHODS)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openjdk','Red Hat OpenJDK'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-api-management','Red Hat OpenShift API Management'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-builds-v2','Red Hat OpenShift Builds V2'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-connectors','Red Hat OpenShift Connectors (RHOC)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-control-plane-service','Red Hat OpenShift Control Plane Service'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-data-foundation','Red Hat OpenShift Data Foundation'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-dev-spaces','Red Hat OpenShift Dev Spaces'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-gitops','Red Hat OpenShift GitOps'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-local','Red Hat OpenShift Local'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-pipelines','Red Hat OpenShift Pipelines'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-serverless','Red Hat OpenShift Serverless'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-service-registry','Red Hat OpenShift Service Registry'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-streams-apache-kafka','Red Hat OpenShift Streams for Apache Kafka (RHOSAK)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openstack-platform','Red Hat OpenStack Platform (RHOSP)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-optaplanner','Red Hat Optaplanner'); 
+INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-plug-ins-for-backstage','Red Hat Plug-ins for Backstage'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-process-automation-manager','Red Hat Process Automation Manager'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-quarkus-registry','Red Hat Quarkus Registry'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-quay','Red Hat Quay'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-satellite','Red Hat Satellite'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-service-interconnect','Red Hat Service Interconnect (formerly known as Application Interconnect)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-single-sign-on','Red Hat Single Sign-On (RHSSO)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-software-collections','Red Hat Software Collections'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-support-for-spring-boot','Red Hat support for Spring Boot'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-trusted-application-pipeline','Red Hat Trusted Application Pipeline (RHTAP)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-update-infrastructure','Red Hat Update Infrastructure (RHUI)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-virtualization','Red Hat Virtualization'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('resource-optimization','Insights Resource Optimization (ROS)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('rh-vulnerability-for-ocp','Insights Vulnerability for OCP'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('rhacs','Red Hat Advanced Cluster Security for Kubernetes (RHACS)'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('self-node-remediation','Self Node Remediation'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('subscription-central','Subscription Central'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('subscription-watch','Subscription Watch'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('telco-sw-components','Telco SW Components'); +INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('vulnerability','Vulnerability'); diff --git a/schema/schema.sql b/schema/schema.sql index 942b981..298edbc 100644 --- a/schema/schema.sql +++ b/schema/schema.sql @@ -2,34 +2,52 @@ CREATE SCHEMA covscanrest; GRANT USAGE ON SCHEMA covscanrest TO postgres; -CREATE TABLE IF NOT EXISTS covscanrest.products( - productID VARCHAR(100), +CREATE TABLE IF NOT EXISTS covscanrest.offerings( + offeringId VARCHAR(100), description VARCHAR(200), - repository VARCHAR (150), - PRIMARY KEY (productID) + PRIMARY KEY (offeringId) +); + +CREATE TABLE IF NOT EXISTS covscanrest.results( + resultsId SERIAL, + datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, + state BOOLEAN, + logs bytea, + task_reference VARCHAR(50), + PRIMARY KEY (resultsId) ); -CREATE TABLE IF NOT EXISTS covscanrest.scans ( +CREATE TABLE IF NOT EXISTS covscanrest.scans( scanID SERIAL, - productID VARCHAR(100), + offeringId VARCHAR(100), eventID VARCHAR(100) NOT NULL, isManagedService BOOLEAN NOT NULL, componentList VARCHAR(100), datetime TIMESTAMP 
WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, + owner VARCHAR(50) NOT NULL, + results SERIAL, + status VARCHAR (50) CONSTRAINT valid_status CHECK(status in ('PENDING', 'DELETED', 'COMPLETED', 'IN PROGRESS')), + last_updated TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, PRIMARY KEY(scanID), - FOREIGN KEY (productID) REFERENCES covscanrest.products(productID) + FOREIGN KEY (offeringId) REFERENCES covscanrest.offerings(offeringId), + FOREIGN KEY (results) REFERENCES covscanrest.results(resultsId) ); -CREATE TABLE IF NOT EXISTS covscanrest.archive ( +CREATE TABLE IF NOT EXISTS covscanrest.archive( scanID SERIAL, - productID VARCHAR(100), + offeringId VARCHAR(100), eventID VARCHAR(100) NOT NULL, isManagedService BOOLEAN NOT NULL, componentList VARCHAR(100), datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, + owner VARCHAR(50) NOT NULL, + results SERIAL, + status VARCHAR (50) CONSTRAINT valid_status CHECK(status in ('PENDING', 'DELETED', 'COMPLETED', 'IN PROGRESS')), + last_updated TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, PRIMARY KEY(scanID), - FOREIGN KEY (productID) REFERENCES covscanrest.products(productID) + FOREIGN KEY (offeringId) REFERENCES covscanrest.offerings(offeringId), + FOREIGN KEY (results) REFERENCES covscanrest.results(resultsId) ); CREATE TABLE IF NOT EXISTS covscanrest.gitscans ( @@ -38,6 +56,8 @@ CREATE TABLE IF NOT EXISTS covscanrest.gitscans ( repository VARCHAR(150), reference VARCHAR(100), commitId VARCHAR(100), + -- SHA256 has a length of 256 bits, so 256 bits would represent 64 hex characters + hashsum VARCHAR(64), PRIMARY KEY(id) ); @@ -45,8 +65,6 @@ CREATE TABLE IF NOT EXISTS covscanrest.pncscans( id SERIAL, buildSystemType VARCHAR(80), buildId VARCHAR(100), - reference VARCHAR(100), - commitId VARCHAR(100), PRIMARY KEY(id) ); From 674e248c1d368be53af1cb36754dab44ddaa4c31 Mon Sep 17 00:00:00 2001 From: Juan Perez de Algaba Date: Tue, 2 May 2023 12:34:08 +0000 Subject: [PATCH 04/26] Update modified covscanrest for osh --- schema/populate.sql | 252 ++++++++++++++++++++++---------------------- 1 file changed, 126 insertions(+), 126 deletions(-) diff --git a/schema/populate.sql b/schema/populate.sql index 949b780..ee69404 100644 --- a/schema/populate.sql +++ b/schema/populate.sql @@ -1,126 +1,126 @@ -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('ansible-automation-platform','Ansible Automation Platform (AAP)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('advisor','Insights Advisor'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('ansible-on-aws','Ansible on AWS'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('ansible-on-azure','Ansible on Azure'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('ansible-on-gcp','Ansible on GCP'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('ansible-wisdom-service','Ansible Wisdom Service'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('cert-manager','cert-manager Operator for Red Hat OpenShift'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('compliance','Insights Compliance'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('connected-customer-experience','Connected Customer Experience (CCX)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('cost-management','Cost Management'); -INSERT INTO 
covscanrest.offerings(offeringId,description) VALUES ('custom-metric-autoscaler','OpenShift Custom Metrics Autoscaler'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('developer-sandbox-for-red-hat-openshift','Developer Sandbox for Red Hat OpenShift'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('dotnet','.NET'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('drift','Insights Drift'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('eclipse-vertx','Red Hat build of Eclipse Vert.x'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('edge-management','Edge Management'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('eventing','Insights Eventing'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('fastdatapath','RHEL Fast Datapath'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('host-management-services','Host Management Services'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('hosted-control-planes','Hosted Control Planes (Hypershift)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('hybrid-application-console','Hybrid Application Console (HAC)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('insights-essential','Insights Essentials'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('kernel-module-management','Kernel Module Management'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('logging-subsystem-for-red-hat-openshift','Logging Subsystem for Red Hat OpenShift'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('lvms-operator','LVMS Operator'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('malware-detection','Insights Malware Detection'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('mgmt-platform','Management Platform'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('migration-toolkit-for-applications','Migration Toolkit for Applications (MTA)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('migration-toolkit-for-containers','Migration Toolkit for Containers (MTC)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('migration-toolkit-for-runtimes','Migration Toolkit for Runtimes (MTR)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('migration-toolkit-for-virtualization','Migration Toolkit for Virtualization (MTV)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('network-observability-operator','Network Observability Operator'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('node-healthcheck-operator','Node HealthCheck Operator'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('node-maintenance-operator','Node Maintenance Operator'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('nvidia-gpu-add-on','NVIDIA GPU Add-On'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('oadp','OpenShift API for Data Protection'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-container-platform','Openshift Container Platform (OCP)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-container-storage','OpenShift Container Storage (OCS)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES 
('openshift-data-foundation-managed-service','Red Hat OpenShift Data Foundation Managed Service'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-dedicated','OpenShift Dedicated (OSD/ROSA)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-developer-tools-and-services-helm','OpenShift Developer Tools and Services (Helm)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-developer-tools-and-services-jenkins','OpenShift Developer Tools and Services (Jenkins)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-distributed-tracing','OpenShift Distributed Tracing'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-on-azure','Openshift on Azure (ARO)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-run-once-duration-override-operator','OpenShift Run Once Duration Override Operator'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-sandboxed-containers','Openshift Sandboxed Containers'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-secondary-scheduler-operator','OpenShift Secondary Scheduler Operator'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-servicemesh','OpenShift Service Mesh'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-virtualization','OpenShift Virtualization (CNV)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-web-terminal-operator','OpenShift Web Terminal Operator'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('openshift-winc','Windows Container Support for OpenShift'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('patch','Insights Patch'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('product-discovery','Product Discovery'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-3scale-api-management-platform','Red Hat 3scale API Management Platform'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-advanced-cluster-management','Red Hat Advanced Cluster Management (RHACM)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-amq-broker','Red Hat AMQ Broker'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-amq-clients','Red Hat AMQ Clients'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-amq-interconnect','Red Hat AMQ Interconnect'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-amq-online','Red Hat AMQ Online'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-amq-streams','Red Hat AMQ Streams'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-build-apicurio-registry','Red Hat build of Apicurio Registry (formerly known as Integration Service Registry)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-build-quarkus','Red Hat Build of Quarkus'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-camel-extensions-quarkus','Red Hat Camel Extensions for Quarkus'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-camel-k','Red Hat Camel K'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-camel-spring-boot','Red Hat Camel for Spring Boot'); -INSERT INTO 
covscanrest.offerings(offeringId,description) VALUES ('red-hat-ceph-storage','Red Hat Ceph Storage'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-certificate-system','Red Hat Certificate System (RHCS)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-certification-program','Red Hat Certification Program (rhcertification)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-code-quarkus','Red Hat Code Quarkus'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-core-os','Red Hat CoreOS'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-data-grid','Red Hat Data Grid'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-debezium','Red Hat Debezium'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-decision-manager','Red Hat Decision Manager'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-developer-hub','Red Hat Developer Hub'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-developer-toolset','Red Hat Developer Toolset (DTS)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-devtools-compilers','Red Hat Developer Tools (DevTools Compilers)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-directory-server','Red Hat Directory Server (RHDS)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-10','Red Hat Enterprise Linux (RHEL) 10'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-6','Red Hat Enterprise Linux (RHEL) 6'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-7','Red Hat Enterprise Linux (RHEL) 7'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-8','Red Hat Enterprise Linux (RHEL) 8'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-9','Red Hat Enterprise Linux (RHEL) 9'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-fuse','Red Hat Fuse'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-gluster-storage','Red Hat Gluster Storage'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-in-vehicle-os','Red Hat In-Vehicle Operating System (RHIVOS)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-jboss-core-services','Red Hat JBoss Core Services'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-jboss-eap','Red Hat JBoss Enterprise Application Platform (EAP)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-jboss-web-server','Red Hat JBoss Web Server'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-observability-service','Red Hat Observability Service'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-open-database-access','Red Hat OpenShift Database Access'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-open-shift-data-science','Red Hat OpenShift Data Science (RHODS)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openjdk','Red Hat OpenJDK'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-api-management','Red Hat OpenShift API Management'); 
-INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-builds-v2','Red Hat OpenShift Builds V2'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-connectors','Red Hat OpenShift Connectors (RHOC)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-control-plane-service','Red Hat OpenShift Control Plane Service'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-data-foundation','Red Hat OpenShift Data Foundation'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-dev-spaces','Red Hat OpenShift Dev Spaces'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-gitops','Red Hat OpenShift GitOps'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-local','Red Hat OpenShift Local'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-pipelines','Red Hat OpenShift Pipelines'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-serverless','Red Hat OpenShift Serverless'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-service-registry','Red Hat OpenShift Service Registry'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openshift-streams-apache-kafka','Red Hat OpenShift Streams for Apache Kafka (RHOSAK)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-openstack-platform','Red Hat OpenStack Platform (RHOSP)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-optaplanner','Red Hat Optaplanner'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-plug-ins-for-backstage','Red Hat Plug-ins for Backstage'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-process-automation-manager','Red Hat Process Automation Manager'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-quarkus-registry','Red Hat Quarkus Registry'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-quay','Red Hat Quay'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-satellite','Red Hat Satellite'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-service-interconnect','Red Hat Service Interconnect (formerly known as Application Interconnect)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-single-sign-on','Red Hat Single Sign-On (RHSSO)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-software-collections','Red Hat Software Collections'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-support-for-spring-boot','Red Hat support for Spring Boot'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-trusted-application-pipeline','Red Hat Trusted Application Pipeline (RHTAP)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-update-infrastructure','Red Hat Update Infrastructure (RHUI)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('red-hat-virtualization','Red Hat Virtualization'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('resource-optimization','Insights Resource Optimization (ROS)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES 
('rh-vulnerability-for-ocp','Insights Vulnerability for OCP'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('rhacs','Red Hat Advanced Cluster Security for Kubernetes (RHACS)'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('self-node-remediation','Self Node Remediation'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('subscription-central','Subscription Central'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('subscription-watch','Subscription Watch'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('telco-sw-components','Telco SW Components'); -INSERT INTO covscanrest.offerings(offeringId,description) VALUES ('vulnerability','Vulnerability'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('ansible-automation-platform','Ansible Automation Platform (AAP)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('advisor','Insights Advisor'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('ansible-on-aws','Ansible on AWS'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('ansible-on-azure','Ansible on Azure'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('ansible-on-gcp','Ansible on GCP'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('ansible-wisdom-service','Ansible Wisdom Service'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('cert-manager','cert-manager Operator for Red Hat OpenShift'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('compliance','Insights Compliance'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('connected-customer-experience','Connected Customer Experience (CCX)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('cost-management','Cost Management'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('custom-metric-autoscaler','OpenShift Custom Metrics Autoscaler'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('developer-sandbox-for-red-hat-openshift','Developer Sandbox for Red Hat OpenShift'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('dotnet','.NET'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('drift','Insights Drift'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('eclipse-vertx','Red Hat build of Eclipse Vert.x'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('edge-management','Edge Management'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('eventing','Insights Eventing'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('fastdatapath','RHEL Fast Datapath'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('host-management-services','Host Management Services'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('hosted-control-planes','Hosted Control Planes (Hypershift)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('hybrid-application-console','Hybrid Application Console (HAC)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('insights-essential','Insights Essentials'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('kernel-module-management','Kernel Module Management'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('logging-subsystem-for-red-hat-openshift','Logging Subsystem for Red Hat OpenShift'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('lvms-operator','LVMS Operator'); +INSERT INTO osh.offerings(offeringId,description) VALUES 
('malware-detection','Insights Malware Detection'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('mgmt-platform','Management Platform'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('migration-toolkit-for-applications','Migration Toolkit for Applications (MTA)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('migration-toolkit-for-containers','Migration Toolkit for Containers (MTC)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('migration-toolkit-for-runtimes','Migration Toolkit for Runtimes (MTR)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('migration-toolkit-for-virtualization','Migration Toolkit for Virtualization (MTV)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('network-observability-operator','Network Observability Operator'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('node-healthcheck-operator','Node HealthCheck Operator'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('node-maintenance-operator','Node Maintenance Operator'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('nvidia-gpu-add-on','NVIDIA GPU Add-On'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('oadp','OpenShift API for Data Protection'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-container-platform','Openshift Container Platform (OCP)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-container-storage','OpenShift Container Storage (OCS)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-data-foundation-managed-service','Red Hat OpenShift Data Foundation Managed Service'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-dedicated','OpenShift Dedicated (OSD/ROSA)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-developer-tools-and-services-helm','OpenShift Developer Tools and Services (Helm)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-developer-tools-and-services-jenkins','OpenShift Developer Tools and Services (Jenkins)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-distributed-tracing','OpenShift Distributed Tracing'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-on-azure','Openshift on Azure (ARO)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-run-once-duration-override-operator','OpenShift Run Once Duration Override Operator'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-sandboxed-containers','Openshift Sandboxed Containers'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-secondary-scheduler-operator','OpenShift Secondary Scheduler Operator'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-servicemesh','OpenShift Service Mesh'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-virtualization','OpenShift Virtualization (CNV)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-web-terminal-operator','OpenShift Web Terminal Operator'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-winc','Windows Container Support for OpenShift'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('patch','Insights Patch'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('product-discovery','Product Discovery'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-3scale-api-management-platform','Red Hat 
3scale API Management Platform'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-advanced-cluster-management','Red Hat Advanced Cluster Management (RHACM)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-amq-broker','Red Hat AMQ Broker'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-amq-clients','Red Hat AMQ Clients'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-amq-interconnect','Red Hat AMQ Interconnect'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-amq-online','Red Hat AMQ Online'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-amq-streams','Red Hat AMQ Streams'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-build-apicurio-registry','Red Hat build of Apicurio Registry (formerly known as Integration Service Registry)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-build-quarkus','Red Hat Build of Quarkus'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-camel-extensions-quarkus','Red Hat Camel Extensions for Quarkus'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-camel-k','Red Hat Camel K'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-camel-spring-boot','Red Hat Camel for Spring Boot'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-ceph-storage','Red Hat Ceph Storage'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-certificate-system','Red Hat Certificate System (RHCS)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-certification-program','Red Hat Certification Program (rhcertification)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-code-quarkus','Red Hat Code Quarkus'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-core-os','Red Hat CoreOS'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-data-grid','Red Hat Data Grid'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-debezium','Red Hat Debezium'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-decision-manager','Red Hat Decision Manager'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-developer-hub','Red Hat Developer Hub'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-developer-toolset','Red Hat Developer Toolset (DTS)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-devtools-compilers','Red Hat Developer Tools (DevTools Compilers)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-directory-server','Red Hat Directory Server (RHDS)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-10','Red Hat Enterprise Linux (RHEL) 10'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-6','Red Hat Enterprise Linux (RHEL) 6'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-7','Red Hat Enterprise Linux (RHEL) 7'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-8','Red Hat Enterprise Linux (RHEL) 8'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-9','Red Hat Enterprise Linux (RHEL) 9'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-fuse','Red Hat Fuse'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-gluster-storage','Red 
Hat Gluster Storage'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-in-vehicle-os','Red Hat In-Vehicle Operating System (RHIVOS)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-jboss-core-services','Red Hat JBoss Core Services'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-jboss-eap','Red Hat JBoss Enterprise Application Platform (EAP)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-jboss-web-server','Red Hat JBoss Web Server'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-observability-service','Red Hat Observability Service'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-open-database-access','Red Hat OpenShift Database Access'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-open-shift-data-science','Red Hat OpenShift Data Science (RHODS)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openjdk','Red Hat OpenJDK'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-api-management','Red Hat OpenShift API Management'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-builds-v2','Red Hat OpenShift Builds V2'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-connectors','Red Hat OpenShift Connectors (RHOC)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-control-plane-service','Red Hat OpenShift Control Plane Service'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-data-foundation','Red Hat OpenShift Data Foundation'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-dev-spaces','Red Hat OpenShift Dev Spaces'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-gitops','Red Hat OpenShift GitOps'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-local','Red Hat OpenShift Local'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-pipelines','Red Hat OpenShift Pipelines'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-serverless','Red Hat OpenShift Serverless'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-service-registry','Red Hat OpenShift Service Registry'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-streams-apache-kafka','Red Hat OpenShift Streams for Apache Kafka (RHOSAK)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openstack-platform','Red Hat OpenStack Platform (RHOSP)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-optaplanner','Red Hat Optaplanner'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-plug-ins-for-backstage','Red Hat Plug-ins for Backstage'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-process-automation-manager','Red Hat Process Automation Manager'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-quarkus-registry','Red Hat Quarkus Registry'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-quay','Red Hat Quay'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-satellite','Red Hat Satellite'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-service-interconnect','Red Hat Service Interconnect (formerly known as Application Interconnect)'); +INSERT INTO 
osh.offerings(offeringId,description) VALUES ('red-hat-single-sign-on','Red Hat Single Sign-On (RHSSO)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-software-collections','Red Hat Software Collections'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-support-for-spring-boot','Red Hat support for Spring Boot'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-trusted-application-pipeline','Red Hat Trusted Application Pipeline (RHTAP)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-update-infrastructure','Red Hat Update Infrastructure (RHUI)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-virtualization','Red Hat Virtualization'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('resource-optimization','Insights Resource Optimization (ROS)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('rh-vulnerability-for-ocp','Insights Vulnerability for OCP'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('rhacs','Red Hat Advanced Cluster Security for Kubernetes (RHACS)'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('self-node-remediation','Self Node Remediation'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('subscription-central','Subscription Central'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('subscription-watch','Subscription Watch'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('telco-sw-components','Telco SW Components'); +INSERT INTO osh.offerings(offeringId,description) VALUES ('vulnerability','Vulnerability'); From bb63891276b9a91fbd2aee51e6140b6ede1971a2 Mon Sep 17 00:00:00 2001 From: Juan Perez de Algaba Date: Tue, 2 May 2023 12:34:37 +0000 Subject: [PATCH 05/26] Update schema.sql to replace covscan for osh --- schema/schema.sql | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/schema/schema.sql b/schema/schema.sql index 298edbc..fe05d59 100644 --- a/schema/schema.sql +++ b/schema/schema.sql @@ -1,14 +1,14 @@ -CREATE SCHEMA covscanrest; +CREATE SCHEMA osh; -GRANT USAGE ON SCHEMA covscanrest TO postgres; +GRANT USAGE ON SCHEMA osh TO postgres; -CREATE TABLE IF NOT EXISTS covscanrest.offerings( +CREATE TABLE IF NOT EXISTS osh.offerings( offeringId VARCHAR(100), description VARCHAR(200), PRIMARY KEY (offeringId) ); -CREATE TABLE IF NOT EXISTS covscanrest.results( +CREATE TABLE IF NOT EXISTS osh.results( resultsId SERIAL, datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, state BOOLEAN, @@ -18,7 +18,7 @@ CREATE TABLE IF NOT EXISTS covscanrest.results( ); -CREATE TABLE IF NOT EXISTS covscanrest.scans( +CREATE TABLE IF NOT EXISTS osh.scans( scanID SERIAL, offeringId VARCHAR(100), eventID VARCHAR(100) NOT NULL, @@ -30,11 +30,11 @@ CREATE TABLE IF NOT EXISTS covscanrest.scans( status VARCHAR (50) CONSTRAINT valid_status CHECK(status in ('PENDING', 'DELETED', 'COMPLETED', 'IN PROGRESS')), last_updated TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, PRIMARY KEY(scanID), - FOREIGN KEY (offeringId) REFERENCES covscanrest.offerings(offeringId), - FOREIGN KEY (results) REFERENCES covscanrest.results(resultsId) + FOREIGN KEY (offeringId) REFERENCES osh.offerings(offeringId), + FOREIGN KEY (results) REFERENCES osh.results(resultsId) ); -CREATE TABLE IF NOT EXISTS covscanrest.archive( +CREATE TABLE IF NOT EXISTS osh.archive( scanID SERIAL, offeringId VARCHAR(100), eventID VARCHAR(100) NOT NULL, @@ 
-46,11 +46,11 @@ CREATE TABLE IF NOT EXISTS covscanrest.archive( status VARCHAR (50) CONSTRAINT valid_status CHECK(status in ('PENDING', 'DELETED', 'COMPLETED', 'IN PROGRESS')), last_updated TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, PRIMARY KEY(scanID), - FOREIGN KEY (offeringId) REFERENCES covscanrest.offerings(offeringId), - FOREIGN KEY (results) REFERENCES covscanrest.results(resultsId) + FOREIGN KEY (offeringId) REFERENCES osh.offerings(offeringId), + FOREIGN KEY (results) REFERENCES osh.results(resultsId) ); -CREATE TABLE IF NOT EXISTS covscanrest.gitscans ( +CREATE TABLE IF NOT EXISTS osh.gitscans ( id SERIAL, buildSystemType VARCHAR(80), repository VARCHAR(150), @@ -61,14 +61,14 @@ CREATE TABLE IF NOT EXISTS covscanrest.gitscans ( PRIMARY KEY(id) ); -CREATE TABLE IF NOT EXISTS covscanrest.pncscans( +CREATE TABLE IF NOT EXISTS osh.pncscans( id SERIAL, buildSystemType VARCHAR(80), buildId VARCHAR(100), PRIMARY KEY(id) ); -CREATE TABLE IF NOT EXISTS covscanrest.brewscans( +CREATE TABLE IF NOT EXISTS osh.brewscans( id SERIAL, buildSystemType VARCHAR(80), brewId VARCHAR(100), @@ -78,4 +78,4 @@ CREATE TABLE IF NOT EXISTS covscanrest.brewscans( fileName VARCHAR(100), builtfromSource BOOLEAN, PRIMARY KEY(id) -); \ No newline at end of file +); From 6df7da6c106e87efd3f9de1f9c1c1f917c974ca0 Mon Sep 17 00:00:00 2001 From: jperezde Date: Wed, 7 Jun 2023 14:19:01 +0200 Subject: [PATCH 06/26] Test Dependency --- pom.xml | 1 - 1 file changed, 1 deletion(-) diff --git a/pom.xml b/pom.xml index a884d11..1e0ded1 100644 --- a/pom.xml +++ b/pom.xml @@ -131,7 +131,6 @@ 2.5.2 --> - From 22c0be081bfe28afad985c49e4cdf0a29d1accd1 Mon Sep 17 00:00:00 2001 From: jperezde Date: Wed, 7 Jun 2023 14:41:15 +0200 Subject: [PATCH 07/26] Test install Kerberos --- src/main/docker/Dockerfile.jvm | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/docker/Dockerfile.jvm b/src/main/docker/Dockerfile.jvm index 839c411..3940446 100644 --- a/src/main/docker/Dockerfile.jvm +++ b/src/main/docker/Dockerfile.jvm @@ -86,6 +86,8 @@ COPY --chown=185 target/quarkus-app/*.jar /deployments/ COPY --chown=185 target/quarkus-app/app/ /deployments/app/ COPY --chown=185 target/quarkus-app/quarkus/ /deployments/quarkus/ +RUN microdnf install krb5-server krb5-libs krb5-workstation + EXPOSE 8080 USER 185 ENV JAVA_OPTS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager" From 94d72b95c8ad98a939a869dadcef7cfdcbcf8753 Mon Sep 17 00:00:00 2001 From: jperezde Date: Wed, 7 Jun 2023 15:30:36 +0200 Subject: [PATCH 08/26] Added kerberos dependendency in pom.xml --- pom.xml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pom.xml b/pom.xml index 1e0ded1..c035783 100644 --- a/pom.xml +++ b/pom.xml @@ -131,6 +131,13 @@ 2.5.2 --> + + + io.quarkiverse.kerberos + quarkus-kerberos + 2.0.0 + + From d3e2990851ac1dc3abb7ab01bd597f7a8d2811d7 Mon Sep 17 00:00:00 2001 From: jperezde Date: Wed, 7 Jun 2023 18:32:32 +0200 Subject: [PATCH 09/26] Modified application.properties --- pom.xml | 5 +++++ src/main/resources/application.properties | 7 ++++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index c035783..60df62d 100644 --- a/pom.xml +++ b/pom.xml @@ -138,6 +138,11 @@ 2.0.0 + + io.quarkus + quarkus-kubernetes-config + + diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index eca88b0..3ad9a1a 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ 
-4,4 +4,9 @@ # couchdb.name=scan-results # couchdb.url=https://localhost:5984 -# quarkus.hibernate-orm.database.generation=drop-and-create \ No newline at end of file +# quarkus.hibernate-orm.database.generation=drop-and-create + +# Kubernetes Secret +quarkus.openshift.env.secrets=kerberos-keytab + +quarkus.kerberos.keytab-path = ${kerberos-keytab} \ No newline at end of file From c6385a754421a57cd0a26ccba187cd687c8d1258 Mon Sep 17 00:00:00 2001 From: Leonid Bossis Date: Wed, 7 Jun 2023 17:05:10 -0400 Subject: [PATCH 10/26] First changes after code review, making use of prepared statements, code cleanup --- src/main/java/dto/BrewObj.java | 24 ++--- src/main/java/dto/BrewObjPayload.java | 24 ++--- src/main/java/dto/ConnectDB.java | 5 +- src/main/java/dto/GitObj.java | 15 +-- src/main/java/dto/GitObjPayload.java | 21 ++-- src/main/java/dto/PncObj.java | 10 +- src/main/java/dto/PncObjPayload.java | 19 +--- src/main/java/dto/ScanObj.java | 5 +- src/main/java/dto/ScanObjPayload.java | 24 ++--- src/main/java/rest/CreateGetResource.java | 61 +++--------- src/main/java/rest/CreateScanRequest.java | 92 +++++++---------- src/main/java/rest/CreateScanResource.java | 42 +++----- src/main/java/rest/CreateStartScan.java | 110 +++++++++++---------- 13 files changed, 184 insertions(+), 268 deletions(-) diff --git a/src/main/java/dto/BrewObj.java b/src/main/java/dto/BrewObj.java index 4ddcdab..449b9c2 100644 --- a/src/main/java/dto/BrewObj.java +++ b/src/main/java/dto/BrewObj.java @@ -5,9 +5,6 @@ import lombok.Builder; import lombok.Getter; import lombok.ToString; import lombok.extern.jackson.Jacksonized; - -// import org.jboss.pnc.api.dto.Request; - import java.io.Serializable; @ToString @@ -16,11 +13,16 @@ import java.io.Serializable; @Jacksonized @Builder public class BrewObj implements Serializable { - public String buildSystemType; - public String brewId; - public String brewNvr; - public String pncId; - public String artifactType; - public String fileName; - public String buildFromSource; -} \ No newline at end of file + + public static final String SQL = "INSERT INTO brewscans " + + "(buildSystemType, brewId, brewNVR, pncId, artifactType, fileName, builtfromSource)" + + "VALUES (? ? ? ? ? ? 
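One note on the INSERT constants introduced in this patch: JDBC positional parameters must be separated by commas, so a placeholder list written as (? ? ? ? ? ? ?) is rejected by PostgreSQL at parse time. A corrected sketch of the brewscans constant is shown below; the same comma fix applies to the gitscans, pncscans and scans constants that follow.

```java
// Comma-separated placeholders (and a space before VALUES) keep the
// concatenated statement valid SQL for PreparedStatement.
public static final String SQL = "INSERT INTO brewscans "
        + "(buildSystemType, brewId, brewNVR, pncId, artifactType, fileName, builtfromSource) "
        + "VALUES (?, ?, ?, ?, ?, ?, ?)";
```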
?)"; + + private String buildSystemType; + private String brewId; + private String brewNvr; + private String pncId; + private String artifactType; + private String fileName; + private Boolean builtFromSource; +} diff --git a/src/main/java/dto/BrewObjPayload.java b/src/main/java/dto/BrewObjPayload.java index 95b7928..2252837 100644 --- a/src/main/java/dto/BrewObjPayload.java +++ b/src/main/java/dto/BrewObjPayload.java @@ -1,23 +1,19 @@ package dto; -import org.eclipse.microprofile.config.ConfigProvider; -// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload; -// import org.jboss.pnc.api.dto.HeartbeatConfig; -// import org.jboss.pnc.api.dto.Request; - -import java.net.URI; import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.sql.Struct; -import java.util.*; - -import org.json.JSONObject; -import org.json.JSONArray; +import org.json.JSONObject; import static constants.HttpHeaders.AUTHORIZATION_STRING; public class BrewObjPayload { public static BrewObj constructScanPayload(JSONObject brewObj) throws URISyntaxException { - return new BrewObj(brewObj.getString("buildSystemType"),brewObj.getString("brewId"),brewObj.getString("brewNvr"),brewObj.getString("pncId"),brewObj.getString("artifactType"),brewObj.getString("fileName"),brewObj.getString("builtFromSource")); + return new BrewObj( + brewObj.getString("buildSystemType"), + brewObj.getString("brewId"), + brewObj.getString("brewNVR"), + brewObj.getString("pncId"), + brewObj.getString("artifactType"), + brewObj.getString("fileName"), + brewObj.getBoolean("builtfromSource")); } -} \ No newline at end of file +} diff --git a/src/main/java/dto/ConnectDB.java b/src/main/java/dto/ConnectDB.java index cb8b084..db73c09 100644 --- a/src/main/java/dto/ConnectDB.java +++ b/src/main/java/dto/ConnectDB.java @@ -1,7 +1,5 @@ package dto; -import constants.PSGQL; - import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; @@ -10,8 +8,7 @@ import static constants.PSGQL.user; import static constants.PSGQL.password; import static constants.PSGQL.url; - -public class ConnectDB{ +public class ConnectDB { // private final String url = "jdbc:postgresql://localhost:5432/scandb"; // private final String user = "postgres"; // private final String password = "password"; diff --git a/src/main/java/dto/GitObj.java b/src/main/java/dto/GitObj.java index bb99507..68245ed 100644 --- a/src/main/java/dto/GitObj.java +++ b/src/main/java/dto/GitObj.java @@ -6,8 +6,6 @@ import lombok.Getter; import lombok.ToString; import lombok.extern.jackson.Jacksonized; -// import org.jboss.pnc.api.dto.Request; - import java.io.Serializable; @ToString @@ -16,8 +14,13 @@ import java.io.Serializable; @Jacksonized @Builder public class GitObj implements Serializable { - public String buildSystemType; - public String repository; - public String reference; - public String commitId; + + public static final String SQL = "INSERT INTO gitscans " + + "(buildSystemType, repository, reference, commitId)" + + "VALUES (? ? ? 
?)"; + + private String buildSystemType; + private String repository; + private String reference; + private String commitId; } \ No newline at end of file diff --git a/src/main/java/dto/GitObjPayload.java b/src/main/java/dto/GitObjPayload.java index bc9eda1..9bc5ffb 100644 --- a/src/main/java/dto/GitObjPayload.java +++ b/src/main/java/dto/GitObjPayload.java @@ -1,23 +1,16 @@ package dto; -import org.eclipse.microprofile.config.ConfigProvider; -// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload; -// import org.jboss.pnc.api.dto.HeartbeatConfig; -// import org.jboss.pnc.api.dto.Request; - -import java.net.URI; import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.sql.Struct; -import java.util.*; - -import org.json.JSONObject; -import org.json.JSONArray; +import org.json.JSONObject; import static constants.HttpHeaders.AUTHORIZATION_STRING; public class GitObjPayload { public static GitObj constructScanPayload(JSONObject gitObj) throws URISyntaxException { - return new GitObj(gitObj.getString("buildSystemType"),gitObj.getString("repository"),gitObj.getString("reference"),gitObj.getString("commitId")); + return new GitObj( + gitObj.getString("buildSystemType"), + gitObj.getString("repository"), + gitObj.getString("reference"), + gitObj.getString("commitId")); } -} \ No newline at end of file +} diff --git a/src/main/java/dto/PncObj.java b/src/main/java/dto/PncObj.java index 285c05e..2633d93 100644 --- a/src/main/java/dto/PncObj.java +++ b/src/main/java/dto/PncObj.java @@ -5,9 +5,6 @@ import lombok.Builder; import lombok.Getter; import lombok.ToString; import lombok.extern.jackson.Jacksonized; - -// import org.jboss.pnc.api.dto.Request; - import java.io.Serializable; @ToString @@ -16,6 +13,9 @@ import java.io.Serializable; @Jacksonized @Builder public class PncObj implements Serializable { - public String buildSystemType; - public String buildId; + + public static final String SQL = "INSERT INTO pncscans (buildSystemType, buildId) VALUES (? 
?)"; + + private String buildSystemType; + private String buildId; } \ No newline at end of file diff --git a/src/main/java/dto/PncObjPayload.java b/src/main/java/dto/PncObjPayload.java index 8c81217..e8b106c 100644 --- a/src/main/java/dto/PncObjPayload.java +++ b/src/main/java/dto/PncObjPayload.java @@ -1,23 +1,14 @@ package dto; -import org.eclipse.microprofile.config.ConfigProvider; -// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload; -// import org.jboss.pnc.api.dto.HeartbeatConfig; -// import org.jboss.pnc.api.dto.Request; - -import java.net.URI; import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.sql.Struct; -import java.util.*; - -import org.json.JSONObject; -import org.json.JSONArray; +import org.json.JSONObject; import static constants.HttpHeaders.AUTHORIZATION_STRING; public class PncObjPayload { public static PncObj constructScanPayload(JSONObject pncObj) throws URISyntaxException { - return new PncObj(pncObj.getString("buildSystemType"),pncObj.getString("buildId")); + return new PncObj( + pncObj.getString("buildSystemType"), + pncObj.getString("buildId")); } -} \ No newline at end of file +} diff --git a/src/main/java/dto/ScanObj.java b/src/main/java/dto/ScanObj.java index c9f825b..8c04963 100644 --- a/src/main/java/dto/ScanObj.java +++ b/src/main/java/dto/ScanObj.java @@ -6,7 +6,6 @@ import lombok.Getter; import lombok.ToString; import lombok.extern.jackson.Jacksonized; -// import org.jboss.pnc.api.dto.Request; //still need to fix all the scan objects to be significantly less poorly written //TODO add interface for the scan objects (is probably the cleanest solution) import java.io.Serializable; @@ -18,6 +17,10 @@ import java.io.Serializable; @Builder public class ScanObj implements Serializable { + public static final String SQL = "INSERT INTO scans " + + "(scanID, offeringId, eventID, isManagedService, componentlist) " + + "VALUES (? ? ? ? 
?)"; + public String scanId; public String productId; public String eventId; diff --git a/src/main/java/dto/ScanObjPayload.java b/src/main/java/dto/ScanObjPayload.java index b19c1ad..b9a8be1 100644 --- a/src/main/java/dto/ScanObjPayload.java +++ b/src/main/java/dto/ScanObjPayload.java @@ -1,23 +1,15 @@ package dto; -import org.eclipse.microprofile.config.ConfigProvider; -// import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload; -// import org.jboss.pnc.api.dto.HeartbeatConfig; -// import org.jboss.pnc.api.dto.Request; - -import java.net.URI; import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.sql.Struct; -import java.util.*; - -import org.json.JSONObject; -import org.json.JSONArray; - -import static constants.HttpHeaders.AUTHORIZATION_STRING; +import org.json.JSONObject; public class ScanObjPayload { public static ScanObj constructScanPayload(JSONObject scanObj) throws URISyntaxException { - return new ScanObj(scanObj.getString("scanId"),scanObj.getString("productId"),scanObj.getString("eventId"),scanObj.getString("isManagedService"),scanObj.getString("componentList")); + return new ScanObj( + scanObj.getString("scanID"), + scanObj.getString("offeringId"), + scanObj.getString("eventID"), + scanObj.getString("isManagedService"), + scanObj.getString("componentList")); } -} \ No newline at end of file +} diff --git a/src/main/java/rest/CreateGetResource.java b/src/main/java/rest/CreateGetResource.java index 8ab6974..6fd3858 100644 --- a/src/main/java/rest/CreateGetResource.java +++ b/src/main/java/rest/CreateGetResource.java @@ -5,45 +5,12 @@ import java.util.LinkedHashMap; import java.util.Set; import dto.ScanObj; import dto.ConnectDB; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.Statement; - -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; -import java.util.Set; -import java.util.stream.Collectors; -import javax.inject.Inject; -import javax.ws.rs.Consumes; - import java.sql.*; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; - // import org.hibernate.EntityManager; -import jakarta.persistence.EntityManager; -import jakarta.persistence.Cacheable; -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.Id; -import jakarta.persistence.NamedQuery; -import jakarta.persistence.QueryHint; -import jakarta.persistence.SequenceGenerator; -import jakarta.persistence.Table; // @Path("/api/v1/[osh-scan]") @Path("/scanGet") @@ -64,22 +31,24 @@ public class CreateGetResource { @Path("/{scanId}") public Set list(@PathParam("scanId") String scanId) { //use to return specific scanIds just use usual fetch from sets, will be querying hte db directly here - try { - ConnectDB connectDB = new ConnectDB(); - Connection conn = connectDB.connect(); - Statement stmt = null; - String sql = "SELECT * FROM scans WHERE scanid=" +scanId; - stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql); - + ConnectDB connectDB = new ConnectDB(); + String sql = "SELECT * FROM scans WHERE scanid=?"; + try(Connection conn = connectDB.connect(); + PreparedStatement pstmt = conn.prepareStatement(sql)) { + 
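The reworked ScanObjPayload above reads five keys from the incoming JSON. Purely as an illustration (the key names come from the getString calls; every value is invented), a matching payload could be assembled like this:

```java
import org.json.JSONObject;

import dto.ScanObj;
import dto.ScanObjPayload;

public class ScanPayloadSketch {
    public static void main(String[] args) throws Exception {
        // Key names mirror ScanObjPayload; the values are placeholders.
        JSONObject scanJson = new JSONObject()
                .put("scanID", "42")
                .put("offeringId", "red-hat-virtualization")
                .put("eventID", "event-1234")
                .put("isManagedService", "false")
                .put("componentList", "component-a,component-b");

        ScanObj scanObj = ScanObjPayload.constructScanPayload(scanJson);
        System.out.println(scanObj);
    }
}
```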
pstmt.setString(1, scanId); + ResultSet rs = pstmt.executeQuery(); while (rs.next()) { //very ugly solution needs some change to where we put the query - Scans.add(new ScanObj(rs.getString("scanid"),rs.getString("productid"),rs.getString("eventid"),rs.getString("ismanagedservice"),rs.getString("componentlist"))); - conn.close(); + Scans.add(new ScanObj( + rs.getString("scanID"), + rs.getString("offeringId"), + rs.getString("eventID"), + rs.getString("isManagedService"), + rs.getString("componentlist"))); } - } catch (SQLException e){ - System.out.println(e); + } catch (SQLException e) { + System.out.println(e.getMessage()); } return Scans; } -} \ No newline at end of file +} diff --git a/src/main/java/rest/CreateScanRequest.java b/src/main/java/rest/CreateScanRequest.java index a9b9679..bbf0d11 100644 --- a/src/main/java/rest/CreateScanRequest.java +++ b/src/main/java/rest/CreateScanRequest.java @@ -1,66 +1,48 @@ package rest; import org.eclipse.microprofile.rest.client.inject.RestClient; -import dto.ScanObj; - -import javax.inject.Inject; import javax.validation.Valid; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.Path; -import java.net.URI; import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.UUID; -import org.json.JSONObject; -import org.json.JSONArray; -import dto.ScanObj; +import java.sql.*; +import org.json.JSONObject; import dto.BrewObj; import dto.ConnectDB; -import dto.ScanObjPayload; import dto.BrewObjPayload; import dto.GitObj; import dto.GitObjPayload; import dto.PncObj; import dto.PncObjPayload; -import static constants.HttpHeaders.AUTHORIZATION_STRING; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.Statement; - @Path("/scanRequest") public class CreateScanRequest { - //all of these need cleaning up to be a more sensible soution + // all of these need cleaning up to be a more sensible solution @RestClient CreateScanService createScanService; @POST @Path("/brew") @Consumes({ "application/json" }) - //in theory should take List to clean it up - public BrewObj invokeScanAnalyze(@Valid String scanInvocation) throws URISyntaxException { + // in theory should take List to clean it up + public BrewObj invokeBrewScanAnalyze(@Valid String scanInvocation) throws URISyntaxException { JSONObject jsonData = new JSONObject(scanInvocation); BrewObj brewObj = BrewObjPayload.constructScanPayload(jsonData); - ConnectDB connectDB = new ConnectDB(); - Connection conn = connectDB.connect(); - Statement stmt = null; - String sql = "INSERT INTO brewscans (buildsystemtype, brewid, brewnvr, pncid, artifacttype, filename, builtfromsource) VALUES ('"+brewObj.buildSystemType+"','"+brewObj.brewId+"','"+brewObj.brewNvr+"','"+brewObj.pncId+"','"+brewObj.artifactType+"','"+brewObj.fileName+"','"+brewObj.buildFromSource+"')"; - try{ - stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql); - conn.close(); - } catch (SQLException e){ - System.out.println(e); + try(Connection conn = connectDB.connect(); + PreparedStatement pstmt = conn.prepareStatement(BrewObj.SQL)) { + pstmt.setString(1, brewObj.getBuildSystemType()); + pstmt.setString(2, brewObj.getBrewId()); + pstmt.setString(3, brewObj.getBrewNvr()); + pstmt.setString(4, brewObj.getPncId()); + pstmt.setString(5, brewObj.getArtifactType()); + pstmt.setString(6, brewObj.getFileName()); + 
pstmt.setBoolean(7, brewObj.getBuiltFromSource()); + pstmt.executeUpdate(); + } catch (SQLException e) { + System.out.println(e.getMessage()); } return brewObj; } @@ -71,18 +53,17 @@ public class CreateScanRequest { public GitObj invokeGitScanAnalyze(@Valid String scanInvocation)throws URISyntaxException { JSONObject jsonData = new JSONObject(scanInvocation); GitObj gitObj = GitObjPayload.constructScanPayload(jsonData); - ConnectDB connectDB = new ConnectDB(); - Connection conn = connectDB.connect(); - Statement stmt = null; - String sql = "INSERT INTO gitscans (buildsystemtype, repository, reference, commitid) VALUES ('"+gitObj.buildSystemType+"','"+gitObj.repository+"','"+gitObj.reference+"','"+gitObj.commitId+"')"; - try{ - stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql); - conn.close(); - } catch (SQLException e){ - System.out.println(e); - } + try(Connection conn = connectDB.connect(); + PreparedStatement pstmt = conn.prepareStatement(GitObj.SQL)) { + pstmt.setString(1, gitObj.getBuildSystemType()); + pstmt.setString(2, gitObj.getRepository()); + pstmt.setString(3, gitObj.getReference()); + pstmt.setString(4, gitObj.getCommitId()); + pstmt.executeUpdate(); + } catch (SQLException e) { + System.out.println(e.getMessage()); + } return gitObj; } @@ -92,18 +73,15 @@ public class CreateScanRequest { public PncObj invokePncScanAnalyze(@Valid String scanInvocation)throws URISyntaxException { JSONObject jsonData = new JSONObject(scanInvocation); PncObj pncObj = PncObjPayload.constructScanPayload(jsonData); - ConnectDB connectDB = new ConnectDB(); - Connection conn = connectDB.connect(); - Statement stmt = null; - String sql = "INSERT INTO pncscans (buildsystemtype, buildid) VALUES ('"+pncObj.buildSystemType+"','"+pncObj.buildId+"')"; - try{ - stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql); - conn.close(); - } catch (SQLException e){ - System.out.println(e); - } + try(Connection conn = connectDB.connect(); + PreparedStatement pstmt = conn.prepareStatement(PncObj.SQL)) { + pstmt.setString(1, pncObj.getBuildSystemType()); + pstmt.setString(2, pncObj.getBuildId()); + pstmt.executeUpdate(); + } catch (SQLException e) { + System.out.println(e.getMessage()); + } return pncObj; } } diff --git a/src/main/java/rest/CreateScanResource.java b/src/main/java/rest/CreateScanResource.java index 107f839..df83367 100644 --- a/src/main/java/rest/CreateScanResource.java +++ b/src/main/java/rest/CreateScanResource.java @@ -1,34 +1,17 @@ package rest; +import dto.*; import org.eclipse.microprofile.rest.client.inject.RestClient; -import dto.ScanObj; - -import javax.inject.Inject; import javax.validation.Valid; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.Path; -import java.net.URI; import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.UUID; -import org.json.JSONObject; -import org.json.JSONArray; +import java.sql.*; +import org.json.JSONObject; import dto.ScanObj; -import dto.ConnectDB; -import dto.ScanObjPayload; import static constants.HttpHeaders.AUTHORIZATION_STRING; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.Statement; @Path("/") public class CreateScanResource { @@ -43,15 +26,16 @@ public class CreateScanResource { JSONObject jsonData = new JSONObject(scanInvocation); ScanObj scanObj = 
ScanObjPayload.constructScanPayload(jsonData); ConnectDB connectDB = new ConnectDB(); - Connection conn = connectDB.connect(); - Statement stmt = null; - String sql = "INSERT INTO scans (scanid, productid, eventid, ismanagedservice, componentlist) VALUES ('" +scanObj.scanId+"', '"+scanObj.productId+"', '"+scanObj.eventId+"', '"+scanObj.isManagedService+"', '"+scanObj.componentList+"')"; - try{ - stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(sql); - conn.close(); - } catch (SQLException e){ - System.out.println(e); + try(Connection conn = connectDB.connect(); + PreparedStatement pstmt = conn.prepareStatement(ScanObj.SQL)) { + pstmt.setString(1, scanObj.scanId); + pstmt.setString(2, scanObj.productId); + pstmt.setString(3, scanObj.eventId); + pstmt.setString(4, scanObj.isManagedService); + pstmt.setString(5, scanObj.componentList); + pstmt.executeUpdate(); + } catch (SQLException e) { + System.out.println(e.getMessage()); } return scanObj; } diff --git a/src/main/java/rest/CreateStartScan.java b/src/main/java/rest/CreateStartScan.java index 6797f1e..85fd1d2 100644 --- a/src/main/java/rest/CreateStartScan.java +++ b/src/main/java/rest/CreateStartScan.java @@ -1,37 +1,14 @@ package rest; +import dto.ConnectDB; import org.eclipse.microprofile.rest.client.inject.RestClient; import dto.ScanObj; - -import javax.inject.Inject; -import javax.validation.Valid; -import javax.ws.rs.Consumes; -import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PUT; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.UUID; -import org.json.JSONObject; -import org.json.JSONArray; -import dto.ScanObj; -import dto.ConnectDB; -import dto.ScanObjPayload; - import javax.ws.rs.PathParam; +import java.net.URISyntaxException; +import java.sql.*; -import static constants.HttpHeaders.AUTHORIZATION_STRING; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.Statement; @Path("/startScan") public class CreateStartScan { @@ -42,35 +19,66 @@ public class CreateStartScan { @PUT @Path("/{scanId}") public ScanObj invokeScanAnalyze(@PathParam("scanId") String scanId) throws URISyntaxException { + ScanObj finalScan = null; ConnectDB connectDB = new ConnectDB(); - Connection conn = connectDB.connect(); - //this is ugly needs to berewritten - Statement stmt = null; + try (Connection conn = connectDB.connect()) { + finalScan = selectDataForArchiving(conn, scanId); + ArchiveSelectedScans(conn, finalScan); + PostArchivingCleanup(conn, scanId); + } catch (SQLException e) { + System.out.println(e.getMessage()); + } + return finalScan; + } + + private ScanObj selectDataForArchiving(Connection conn, String scanId) { ScanObj finalScan = null; + String qry = "SELECT * FROM scans WHERE scanid=?"; + try (PreparedStatement pstmt = conn.prepareStatement(qry)) { + pstmt.setString(1, scanId); + ResultSet rs = pstmt.executeQuery(); - String sql = "SELECT * FROM scans WHERE scanid=" + scanId; - //need to add figure out an archieve system and wether its nessacery (archieve value??) 
- try{ - stmt = conn.createStatement(); - //terrible solution has to be a better way of doing this - ResultSet rs = stmt.executeQuery(sql); - - //fix for individual results (not resultset) //TODO: need to add unique keys to DBs - finalScan = new ScanObj(rs.getString("scanid"),rs.getString("productid"),rs.getString("eventid"),rs.getString("ismanagedservice"),rs.getString("componentlist")); - String copySql = "INSERT INTO archive (scanid, productid, eventid, ismanagedservice, componentlist) VALUES ('" +finalScan.scanId+"', '"+finalScan.productId+"', '"+finalScan.eventId+"', '"+finalScan.isManagedService+"', '"+finalScan.componentList+"')"; - stmt.executeUpdate(copySql); + //fix for individual results (not resultset) + finalScan = new ScanObj( + rs.getString("scanID"), + rs.getString("offeringId"), + rs.getString("eventID"), + rs.getString("isManagedService"), + rs.getString("componentlist")); + } catch (SQLException e) { + System.out.println(e.getMessage()); + } + return finalScan; + } + + private void ArchiveSelectedScans(Connection conn, ScanObj finalScan) { + String qry = "INSERT INTO archive " + + "(scanID, offeringId, eventID, isManagedService, componentlist) " + + "VALUES (? ? ? ? ?)"; + try (PreparedStatement pstmt = conn.prepareStatement(qry)) { + pstmt.setString(1, finalScan.scanId); + pstmt.setString(2, finalScan.productId); + pstmt.setString(3, finalScan.eventId); + pstmt.setString(4, finalScan.isManagedService); + pstmt.setString(5, finalScan.componentList); + pstmt.executeUpdate(); + } catch (SQLException e) { + System.out.println(e.getMessage()); + } + } - //TODO add proper checks - String deleteSql = "DELETE FROM scans WHERE scanid=" + scanId; - stmt.executeUpdate(deleteSql); + private void PostArchivingCleanup(Connection conn, String scanId) { + //TODO add proper checks + //send task to the actual interface here using the resultset returned (should multiple scanids be allowed): + //once the task is complete AND we have confirmation that the scan is done run the following sql - //send task to the actual interface here using the resultset returned (should multiple scanids be allowed): - //once the task is complete AND we have confirmation that the scan is done run the following sql - conn.close(); - } catch (SQLException e){ - System.out.println(e); - } - return finalScan; + String qry = "DELETE FROM scans WHERE scanid=?"; + try (PreparedStatement pstmt = conn.prepareStatement(qry)) { + pstmt.setString(1, scanId); + pstmt.executeUpdate(); + } catch (SQLException e) { + System.out.println(e.getMessage()); + } } -} +} \ No newline at end of file From af4a80b04aad374da9903fc375ba0df373e41d83 Mon Sep 17 00:00:00 2001 From: jperezde Date: Thu, 8 Jun 2023 01:15:19 +0200 Subject: [PATCH 11/26] Added Kerberos dependency --- README.md | 127 ++++----------- pom.xml | 170 +++++++++------------ src/main/docker/Dockerfile.jvm | 1 + src/main/java/dto/BrewObj.java | 2 - src/main/java/dto/BrewObjPayload.java | 12 +- src/main/java/dto/ConnectDB.java | 6 +- src/main/java/dto/GitObj.java | 2 - src/main/java/dto/GitObjPayload.java | 12 +- src/main/java/dto/PncObj.java | 2 - src/main/java/dto/PncObjPayload.java | 12 +- src/main/java/dto/ScanObj.java | 3 - src/main/java/dto/ScanObjPayload.java | 12 +- src/main/java/rest/CreateGetResource.java | 44 +----- src/main/java/rest/CreateScanRequest.java | 30 +--- src/main/java/rest/CreateScanResource.java | 24 +-- src/main/java/rest/CreateStartScan.java | 30 +--- src/main/java/rest/RemoveScan.java | 30 +--- src/main/java/rest/Scan.java | 6 +- 
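The archiving flow refactored above runs a select, a copy into archive and a delete on the same connection but commits each statement separately. If the copy and the delete are meant to be atomic, a single transaction is the usual approach; a minimal sketch, assuming the same table and column names and an integer scanID (this is not code from the repository):

```java
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public final class ArchiveScanTx {

    // Copies one scan row into archive and removes it from scans, atomically.
    public static void archiveScan(Connection conn, int scanId) throws SQLException {
        boolean previousAutoCommit = conn.getAutoCommit();
        conn.setAutoCommit(false);
        try (PreparedStatement copy = conn.prepareStatement(
                     "INSERT INTO archive (scanID, offeringId, eventID, isManagedService, componentlist) "
                             + "SELECT scanID, offeringId, eventID, isManagedService, componentlist "
                             + "FROM scans WHERE scanID = ?");
             PreparedStatement delete = conn.prepareStatement(
                     "DELETE FROM scans WHERE scanID = ?")) {
            copy.setInt(1, scanId);
            copy.executeUpdate();
            delete.setInt(1, scanId);
            delete.executeUpdate();
            conn.commit();           // both statements succeed or neither does
        } catch (SQLException e) {
            conn.rollback();         // undo the partial copy on any failure
            throw e;
        } finally {
            conn.setAutoCommit(previousAutoCommit);
        }
    }

    private ArchiveScanTx() {}
}
```

Using INSERT ... SELECT also keeps the copy entirely inside the database, so the row does not have to be materialized as a ScanObj just to be re-inserted.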
src/main/java/rest/StoreData.java | 84 +--------- src/main/resources/application.properties | 7 +- 20 files changed, 154 insertions(+), 462 deletions(-) diff --git a/README.md b/README.md index dcea9fd..0a5fe32 100644 --- a/README.md +++ b/README.md @@ -1,112 +1,51 @@ -See https://docs.google.com/document/d/15yod6K_ZbNkJ_ern7gwpxjBkdJIlHXORfYZ3CGQhnEM/edit?usp=sharing for a full version with images +# code-with-quarkus -# Introduction -Currently we rely on CPaaS to submit requests to PSSaaS which then invokes the PSSC scanning container. The idea behind the ScanChain api is to act as an interaction point for services to be able to directly access our scan tooling. +This project uses Quarkus, the Supersonic Subatomic Java Framework. -Our api will be written in Quarkus for ease of use and deployment to OpenShift, we will also use Tekton to assist with CI/CD. +If you want to learn more about Quarkus, please visit its website: https://quarkus.io/ . -# How to build +## Running the application in dev mode -To set up the environment. After cloning the repository: - -``` -cd / -quarkus create app quarkus:dev -mvn -N io.takari:maven:wrapper -``` - -Also, it is necessary to create a local PostgreSQL instance. For development purposes, the parameters are: -``` -username = postgresql -password = password -``` - -ToDo: Create Database Model - - - -To run the Quarkus build in dev mode simply run: -```` +You can run your application in dev mode that enables live coding using: +```shell script ./mvnw compile quarkus:dev -```` -All end points should be avaliable on localhost:8080/{endpoint}. The endpoints are listed in the endpoints section - - - -# Deploying to OpenShift (https://quarkus.io/guides/deploying-to-openshift) -Part of the advantage of working with quarkus is the ease of which we can deploy it to OpenShift. We have the OpenShift extension already installed via the pom, - -All that should be required to build and deploy OpenShift is to login to OpenShift via the usual method (oc login (creds) for example). Before running a build command: - -You can then expose the routes (oc expose {route}), then your application should be accessible on the OpenShift cluster. This is verifiable either by using the console to request which services are running (oc get svc) or by using the web console which should display the service graphically. - -# Design diagram -API endpoint diagram with all endpoints DB links, connections to further services (PNC API etc) - -# API endpoints - -## /{scanId} - GET request for retrieving scans -This is a simple request for retrieving scans that are stored in our postgresql database. The assigned scanId will return the whole scan payload in JSON format. - -## / - POST request takes a JSON payload to start scans (Maybe isnt relevant/shouldnt be included in the future) - -Creating scans via passing fully formed JSON payloads. The standard JSON format should contain: -product-id -event-id -is-managed-service -component-list -See appendix 1 for a provided example - -## /scanRequest - Post request for starting scans - -There are several different types of build that should be retrieved from the backend source. Different inputs are required based off the build source. 
- -The required fields for BREW builds are: -buildSystemType -brewId -brewNVR - matches brewId -pncId -artifactType -fileName -builtFromSource - -The required fields for git builds are: -buildSystemType -repository -reference -commitId - -The required fields for PNC builds are: -buildSystemType -buildId +``` -This information should allow us to have all the requirements for retrieving and then starting a scan when requested from the required sources. +> **_NOTE:_** Quarkus now ships with a Dev UI, which is available in dev mode only at http://localhost:8080/q/dev/. -## /startScan - PUT request to start off the relevant scan +## Packaging and running the application -Only requires the scanId and should start off the relevant scan, should return a success only on finished or failure if there's no further response after timeout. -## /removeScan - DELETE request to remove a scan build from DB +The application can be packaged using: +```shell script +./mvnw package +``` +It produces the `quarkus-run.jar` file in the `target/quarkus-app/` directory. +Be aware that it’s not an _über-jar_ as the dependencies are copied into the `target/quarkus-app/lib/` directory. -Only requires the scanId should remove the relevant scan from our DB. Should return a success or failure. +The application is now runnable using `java -jar target/quarkus-app/quarkus-run.jar`. -# Expanded work to do +If you want to build an _über-jar_, execute the following command: +```shell script +./mvnw package -Dquarkus.package.type=uber-jar +``` -## Jenkins +The application, packaged as an _über-jar_, is now runnable using `java -jar target/*-runner.jar`. -Haven't looked into the correct way for the API to interact with Jenkins needs more investigation. +## Creating a native executable -## Jira tickets still to do: -https://issues.redhat.com/browse/PSSECMGT-1548 -https://issues.redhat.com/browse/PSSECMGT-1549 -https://issues.redhat.com/browse/PSSECMGT-1550 -https://issues.redhat.com/browse/PSSECMGT-1551 -https://issues.redhat.com/browse/PSSECMGT-1552 -https://issues.redhat.com/browse/PSSECMGT-1553 -https://issues.redhat.com/browse/PSSECMGT-1554 +You can create a native executable using: +```shell script +./mvnw package -Pnative +``` +Or, if you don't have GraalVM installed, you can run the native executable build in a container using: +```shell script +./mvnw package -Pnative -Dquarkus.native.container-build=true +``` -# Appendix +You can then execute your native executable with: `./target/code-with-quarkus-1.0.0-SNAPSHOT-runner` -Appendix 1 +If you want to learn more about building native executables, please consult https://quarkus.io/guides/maven-tooling. 
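As a concrete companion to the brew field list in the original README shown above, this is the kind of JSON body a client might POST to /scanRequest/brew. The key names match BrewObjPayload; every value here is invented.

```java
import org.json.JSONObject;

public class BrewRequestSketch {
    public static void main(String[] args) {
        // Key names mirror BrewObjPayload/BrewObj; the values are placeholders.
        JSONObject brewRequest = new JSONObject()
                .put("buildSystemType", "BREW")
                .put("brewId", "1234567")
                .put("brewNVR", "example-package-1.0.0-1.el9")
                .put("pncId", "0")
                .put("artifactType", "rpm")
                .put("fileName", "example-package-1.0.0-1.el9.src.rpm")
                .put("builtfromSource", true);

        // Serialized form of the request body, sent with Content-Type: application/json.
        System.out.println(brewRequest.toString(2));
    }
}
```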
+## Related Guides diff --git a/pom.xml b/pom.xml index 60df62d..285d46a 100644 --- a/pom.xml +++ b/pom.xml @@ -1,14 +1,13 @@ - - - jboss - JBoss repository - http://repository.jboss.org/maven2 - - - + + + jboss + JBoss repository + http://repository.jboss.org/maven2 + + 4.0.0 com.redhat.ncaughey rest-json-quickstart @@ -33,58 +32,44 @@ pom import - - - - - - - - io.quarkus - quarkus-openshift - - - org.json - json - 20220320 - - - - org.postgresql - postgresql - 42.6.0 - - - - - - - - org.hibernate - hibernate-core + + io.quarkiverse.kerberos + quarkus-kerberos + 1.0.0 - - org.glassfish.jaxb - jaxb-runtime + + io.quarkus + quarkus-openshift + + + org.json + json + 20220320 + + + + org.postgresql + postgresql + 42.6.0 + + + org.hibernate + hibernate-core + + + org.glassfish.jaxb + jaxb-runtime + - - - io.quarkus - quarkus-jdbc-postgresql - + + io.quarkus + quarkus-jdbc-postgresql + io.quarkus @@ -99,50 +84,32 @@ quarkus-junit5 test - - org.projectlombok - lombok - 1.18.26 - provided - - - - - javax.validation - validation-api - 1.0.0.GA - - - - jakarta.persistence - jakarta.persistence-api - 3.1.0 - - - - - org.eclipse.microprofile.rest.client - microprofile-rest-client-api - 3.0.1 - - - - - - io.quarkiverse.kerberos - quarkus-kerberos - 2.0.0 - + + org.projectlombok + lombok + 1.18.26 + provided + - - io.quarkus - quarkus-kubernetes-config - + + + javax.validation + validation-api + 1.0.0.GA + + + + jakarta.persistence + jakarta.persistence-api + 3.1.0 + + + + org.eclipse.microprofile.rest.client + microprofile-rest-client-api + 3.0.1 + @@ -199,6 +166,19 @@ + + io.smallrye + jandex-maven-plugin + 3.1.1 + + + make-index + + jandex + + + + diff --git a/src/main/docker/Dockerfile.jvm b/src/main/docker/Dockerfile.jvm index 3940446..5a12f95 100644 --- a/src/main/docker/Dockerfile.jvm +++ b/src/main/docker/Dockerfile.jvm @@ -87,6 +87,7 @@ COPY --chown=185 target/quarkus-app/app/ /deployments/app/ COPY --chown=185 target/quarkus-app/quarkus/ /deployments/quarkus/ RUN microdnf install krb5-server krb5-libs krb5-workstation +RUN cat /etc/krb5.conf EXPOSE 8080 USER 185 diff --git a/src/main/java/dto/BrewObj.java b/src/main/java/dto/BrewObj.java index 4ddcdab..a7136c4 100644 --- a/src/main/java/dto/BrewObj.java +++ b/src/main/java/dto/BrewObj.java @@ -6,8 +6,6 @@ import lombok.Getter; import lombok.ToString; import lombok.extern.jackson.Jacksonized; -// import org.jboss.pnc.api.dto.Request; - import java.io.Serializable; @ToString diff --git a/src/main/java/dto/BrewObjPayload.java b/src/main/java/dto/BrewObjPayload.java index 95b7928..0a0709f 100644 --- a/src/main/java/dto/BrewObjPayload.java +++ b/src/main/java/dto/BrewObjPayload.java @@ -1,20 +1,12 @@ package dto; -import org.eclipse.microprofile.config.ConfigProvider; // import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload; // import org.jboss.pnc.api.dto.HeartbeatConfig; // import org.jboss.pnc.api.dto.Request; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.sql.Struct; -import java.util.*; - -import org.json.JSONObject; -import org.json.JSONArray; +import org.json.JSONObject; -import static constants.HttpHeaders.AUTHORIZATION_STRING; +import java.net.URISyntaxException; public class BrewObjPayload { public static BrewObj constructScanPayload(JSONObject brewObj) throws URISyntaxException { diff --git a/src/main/java/dto/ConnectDB.java b/src/main/java/dto/ConnectDB.java index cb8b084..2080def 100644 --- a/src/main/java/dto/ConnectDB.java +++ 
b/src/main/java/dto/ConnectDB.java @@ -1,14 +1,10 @@ package dto; -import constants.PSGQL; - import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; -import static constants.PSGQL.user; -import static constants.PSGQL.password; -import static constants.PSGQL.url; +import static constants.PSGQL.*; public class ConnectDB{ diff --git a/src/main/java/dto/GitObj.java b/src/main/java/dto/GitObj.java index bb99507..435ec0a 100644 --- a/src/main/java/dto/GitObj.java +++ b/src/main/java/dto/GitObj.java @@ -6,8 +6,6 @@ import lombok.Getter; import lombok.ToString; import lombok.extern.jackson.Jacksonized; -// import org.jboss.pnc.api.dto.Request; - import java.io.Serializable; @ToString diff --git a/src/main/java/dto/GitObjPayload.java b/src/main/java/dto/GitObjPayload.java index bc9eda1..8d2561f 100644 --- a/src/main/java/dto/GitObjPayload.java +++ b/src/main/java/dto/GitObjPayload.java @@ -1,20 +1,12 @@ package dto; -import org.eclipse.microprofile.config.ConfigProvider; // import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload; // import org.jboss.pnc.api.dto.HeartbeatConfig; // import org.jboss.pnc.api.dto.Request; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.sql.Struct; -import java.util.*; - -import org.json.JSONObject; -import org.json.JSONArray; +import org.json.JSONObject; -import static constants.HttpHeaders.AUTHORIZATION_STRING; +import java.net.URISyntaxException; public class GitObjPayload { public static GitObj constructScanPayload(JSONObject gitObj) throws URISyntaxException { diff --git a/src/main/java/dto/PncObj.java b/src/main/java/dto/PncObj.java index 285c05e..7ce1a1a 100644 --- a/src/main/java/dto/PncObj.java +++ b/src/main/java/dto/PncObj.java @@ -6,8 +6,6 @@ import lombok.Getter; import lombok.ToString; import lombok.extern.jackson.Jacksonized; -// import org.jboss.pnc.api.dto.Request; - import java.io.Serializable; @ToString diff --git a/src/main/java/dto/PncObjPayload.java b/src/main/java/dto/PncObjPayload.java index 8c81217..3f83508 100644 --- a/src/main/java/dto/PncObjPayload.java +++ b/src/main/java/dto/PncObjPayload.java @@ -1,20 +1,12 @@ package dto; -import org.eclipse.microprofile.config.ConfigProvider; // import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload; // import org.jboss.pnc.api.dto.HeartbeatConfig; // import org.jboss.pnc.api.dto.Request; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.sql.Struct; -import java.util.*; - -import org.json.JSONObject; -import org.json.JSONArray; +import org.json.JSONObject; -import static constants.HttpHeaders.AUTHORIZATION_STRING; +import java.net.URISyntaxException; public class PncObjPayload { public static PncObj constructScanPayload(JSONObject pncObj) throws URISyntaxException { diff --git a/src/main/java/dto/ScanObj.java b/src/main/java/dto/ScanObj.java index c9f825b..a8d835b 100644 --- a/src/main/java/dto/ScanObj.java +++ b/src/main/java/dto/ScanObj.java @@ -6,9 +6,6 @@ import lombok.Getter; import lombok.ToString; import lombok.extern.jackson.Jacksonized; -// import org.jboss.pnc.api.dto.Request; -//still need to fix all the scan objects to be significantly less poorly written -//TODO add interface for the scan objects (is probably the cleanest solution) import java.io.Serializable; @ToString diff --git a/src/main/java/dto/ScanObjPayload.java b/src/main/java/dto/ScanObjPayload.java index b19c1ad..b44c92f 100644 --- 
a/src/main/java/dto/ScanObjPayload.java +++ b/src/main/java/dto/ScanObjPayload.java @@ -1,20 +1,12 @@ package dto; -import org.eclipse.microprofile.config.ConfigProvider; // import org.jboss.pnc.api.deliverablesanalyzer.dto.AnalyzePayload; // import org.jboss.pnc.api.dto.HeartbeatConfig; // import org.jboss.pnc.api.dto.Request; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.sql.Struct; -import java.util.*; - -import org.json.JSONObject; -import org.json.JSONArray; +import org.json.JSONObject; -import static constants.HttpHeaders.AUTHORIZATION_STRING; +import java.net.URISyntaxException; public class ScanObjPayload { public static ScanObj constructScanPayload(JSONObject scanObj) throws URISyntaxException { diff --git a/src/main/java/rest/CreateGetResource.java b/src/main/java/rest/CreateGetResource.java index 8ab6974..917d0a6 100644 --- a/src/main/java/rest/CreateGetResource.java +++ b/src/main/java/rest/CreateGetResource.java @@ -1,49 +1,21 @@ package rest; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.Set; -import dto.ScanObj; import dto.ConnectDB; +import dto.ScanObj; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.Statement; - -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Collections; +import java.util.LinkedHashMap; import java.util.Set; -import java.util.stream.Collectors; -import javax.inject.Inject; -import javax.ws.rs.Consumes; - -import java.sql.*; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; // import org.hibernate.EntityManager; -import jakarta.persistence.EntityManager; -import jakarta.persistence.Cacheable; -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.Id; -import jakarta.persistence.NamedQuery; -import jakarta.persistence.QueryHint; -import jakarta.persistence.SequenceGenerator; -import jakarta.persistence.Table; + // @Path("/api/v1/[osh-scan]") @Path("/scanGet") diff --git a/src/main/java/rest/CreateScanRequest.java b/src/main/java/rest/CreateScanRequest.java index a9b9679..4373dd5 100644 --- a/src/main/java/rest/CreateScanRequest.java +++ b/src/main/java/rest/CreateScanRequest.java @@ -1,45 +1,23 @@ package rest; +import dto.*; import org.eclipse.microprofile.rest.client.inject.RestClient; -import dto.ScanObj; +import org.json.JSONObject; -import javax.inject.Inject; import javax.validation.Valid; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.Path; -import java.net.URI; import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.UUID; -import org.json.JSONObject; -import org.json.JSONArray; -import dto.ScanObj; -import dto.BrewObj; -import dto.ConnectDB; -import dto.ScanObjPayload; -import dto.BrewObjPayload; -import dto.GitObj; -import dto.GitObjPayload; -import dto.PncObj; -import dto.PncObjPayload; - -import static constants.HttpHeaders.AUTHORIZATION_STRING; import 
java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - -import java.sql.Connection; -import java.sql.DriverManager; import java.sql.ResultSet; +import java.sql.SQLException; import java.sql.Statement; @Path("/scanRequest") public class CreateScanRequest { - //all of these need cleaning up to be a more sensible soution + //all of these need cleaning up to be a more sensible solution @RestClient CreateScanService createScanService; diff --git a/src/main/java/rest/CreateScanResource.java b/src/main/java/rest/CreateScanResource.java index 107f839..417ccea 100644 --- a/src/main/java/rest/CreateScanResource.java +++ b/src/main/java/rest/CreateScanResource.java @@ -1,33 +1,19 @@ package rest; -import org.eclipse.microprofile.rest.client.inject.RestClient; +import dto.ConnectDB; import dto.ScanObj; +import dto.ScanObjPayload; +import org.eclipse.microprofile.rest.client.inject.RestClient; +import org.json.JSONObject; -import javax.inject.Inject; import javax.validation.Valid; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.Path; -import java.net.URI; import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.UUID; -import org.json.JSONObject; -import org.json.JSONArray; -import dto.ScanObj; -import dto.ConnectDB; -import dto.ScanObjPayload; - -import static constants.HttpHeaders.AUTHORIZATION_STRING; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - import java.sql.Connection; -import java.sql.DriverManager; import java.sql.ResultSet; +import java.sql.SQLException; import java.sql.Statement; @Path("/") diff --git a/src/main/java/rest/CreateStartScan.java b/src/main/java/rest/CreateStartScan.java index 6797f1e..66748ce 100644 --- a/src/main/java/rest/CreateStartScan.java +++ b/src/main/java/rest/CreateStartScan.java @@ -1,36 +1,16 @@ package rest; -import org.eclipse.microprofile.rest.client.inject.RestClient; +import dto.ConnectDB; import dto.ScanObj; +import org.eclipse.microprofile.rest.client.inject.RestClient; -import javax.inject.Inject; -import javax.validation.Valid; -import javax.ws.rs.Consumes; -import javax.ws.rs.POST; -import javax.ws.rs.Path; import javax.ws.rs.PUT; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.UUID; -import org.json.JSONObject; -import org.json.JSONArray; -import dto.ScanObj; -import dto.ConnectDB; -import dto.ScanObjPayload; - +import javax.ws.rs.Path; import javax.ws.rs.PathParam; - -import static constants.HttpHeaders.AUTHORIZATION_STRING; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - +import java.net.URISyntaxException; import java.sql.Connection; -import java.sql.DriverManager; import java.sql.ResultSet; +import java.sql.SQLException; import java.sql.Statement; @Path("/startScan") diff --git a/src/main/java/rest/RemoveScan.java b/src/main/java/rest/RemoveScan.java index efc0d19..e8829ef 100644 --- a/src/main/java/rest/RemoveScan.java +++ b/src/main/java/rest/RemoveScan.java @@ -1,37 +1,15 @@ package rest; -import org.eclipse.microprofile.rest.client.inject.RestClient; +import dto.ConnectDB; import dto.ScanObj; +import org.eclipse.microprofile.rest.client.inject.RestClient; -import javax.inject.Inject; -import javax.validation.Valid; -import javax.ws.rs.Consumes; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PUT; 
import javax.ws.rs.DELETE; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.UUID; -import org.json.JSONObject; -import org.json.JSONArray; -import dto.ScanObj; -import dto.ConnectDB; -import dto.ScanObjPayload; - +import javax.ws.rs.Path; import javax.ws.rs.PathParam; - -import static constants.HttpHeaders.AUTHORIZATION_STRING; +import java.net.URISyntaxException; import java.sql.Connection; -import java.sql.DriverManager; import java.sql.SQLException; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; import java.sql.Statement; @Path("/deleteScan") diff --git a/src/main/java/rest/Scan.java b/src/main/java/rest/Scan.java index 2394c6f..e7e84b4 100644 --- a/src/main/java/rest/Scan.java +++ b/src/main/java/rest/Scan.java @@ -1,8 +1,6 @@ -package rest; +package rest; -import javax.persistence.Entity; - -public class Scan { +public class Scan { private int scanId; private String productId; private String eventId; diff --git a/src/main/java/rest/StoreData.java b/src/main/java/rest/StoreData.java index ae2925e..711f437 100644 --- a/src/main/java/rest/StoreData.java +++ b/src/main/java/rest/StoreData.java @@ -1,91 +1,21 @@ package rest; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.Set; -import dto.ScanObj; // import dto.ConnectDB; // import dto.Scan; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; +import org.hibernate.Session; +import org.hibernate.SessionFactory; +import org.hibernate.Transaction; +import org.hibernate.boot.Metadata; +import org.hibernate.boot.MetadataSources; +import org.hibernate.boot.registry.StandardServiceRegistry; +import org.hibernate.boot.registry.StandardServiceRegistryBuilder; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.Statement; - -import javax.ws.rs.DELETE; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import java.util.Set; -import java.util.stream.Collectors; -import javax.inject.Inject; -import javax.ws.rs.Consumes; - -import java.sql.*; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; - -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.Transaction; -import org.hibernate.boot.Metadata; -import org.hibernate.boot.MetadataSources; -import org.hibernate.boot.registry.StandardServiceRegistry; -import org.hibernate.boot.registry.StandardServiceRegistryBuilder; // import org.hibernate.EntityManager; -import jakarta.persistence.EntityManager; -import jakarta.persistence.Cacheable; -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.Id; -import jakarta.persistence.NamedQuery; -import jakarta.persistence.QueryHint; -import jakarta.persistence.SequenceGenerator; -import jakarta.persistence.Table; - -import org.eclipse.microprofile.rest.client.inject.RestClient; -import dto.ScanObj; - -import javax.inject.Inject; -import javax.validation.Valid; -import javax.ws.rs.Consumes; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.PUT; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; 
-import java.util.List; -import java.util.UUID; -import org.json.JSONObject; -import org.json.JSONArray; -import dto.ScanObj; -import dto.ConnectDB; -import dto.ScanObjPayload; - -import javax.ws.rs.PathParam; - -import static constants.HttpHeaders.AUTHORIZATION_STRING; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.Statement; @Path("/storeData") public class StoreData { diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 3ad9a1a..eca88b0 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -4,9 +4,4 @@ # couchdb.name=scan-results # couchdb.url=https://localhost:5984 -# quarkus.hibernate-orm.database.generation=drop-and-create - -# Kubernetes Secret -quarkus.openshift.env.secrets=kerberos-keytab - -quarkus.kerberos.keytab-path = ${kerberos-keytab} \ No newline at end of file +# quarkus.hibernate-orm.database.generation=drop-and-create \ No newline at end of file From 1ab0639941932aa5581dd95154fd28c971846d18 Mon Sep 17 00:00:00 2001 From: jperezde Date: Thu, 8 Jun 2023 11:31:17 +0200 Subject: [PATCH 12/26] Test keytab --- src/main/resources/application.properties | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index eca88b0..6933166 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -4,4 +4,7 @@ # couchdb.name=scan-results # couchdb.url=https://localhost:5984 -# quarkus.hibernate-orm.database.generation=drop-and-create \ No newline at end of file +# quarkus.hibernate-orm.database.generation=drop-and-create + +quarkus.kerberos.keytab-path= "file:///tmp/TASK1559577-openscanhub-wrapper.keytab" + From fee2bd340ff8f3ac5248e663892ef9c432a12cab Mon Sep 17 00:00:00 2001 From: jperezde Date: Fri, 9 Jun 2023 17:57:57 +0200 Subject: [PATCH 13/26] Added Kerberos auth to methods --- pom.xml | 2 +- src/main/docker/Dockerfile.jvm | 2 -- src/main/java/rest/CreateGetResource.java | 9 ++++-- src/main/java/rest/CreateScanRequest.java | 2 ++ src/main/java/rest/CreateStartScan.java | 2 ++ src/main/java/rest/UsersResource.java | 36 +++++++++++++++++++++++ src/main/resources/application.properties | 4 ++- 7 files changed, 51 insertions(+), 6 deletions(-) create mode 100644 src/main/java/rest/UsersResource.java diff --git a/pom.xml b/pom.xml index 285d46a..0f4a34f 100644 --- a/pom.xml +++ b/pom.xml @@ -10,7 +10,7 @@ 4.0.0 com.redhat.ncaughey - rest-json-quickstart + osh 1.0.0-SNAPSHOT 3.10.1 diff --git a/src/main/docker/Dockerfile.jvm b/src/main/docker/Dockerfile.jvm index 5a12f95..5ba77be 100644 --- a/src/main/docker/Dockerfile.jvm +++ b/src/main/docker/Dockerfile.jvm @@ -86,8 +86,6 @@ COPY --chown=185 target/quarkus-app/*.jar /deployments/ COPY --chown=185 target/quarkus-app/app/ /deployments/app/ COPY --chown=185 target/quarkus-app/quarkus/ /deployments/quarkus/ -RUN microdnf install krb5-server krb5-libs krb5-workstation -RUN cat /etc/krb5.conf EXPOSE 8080 USER 185 diff --git a/src/main/java/rest/CreateGetResource.java b/src/main/java/rest/CreateGetResource.java index 917d0a6..2c1c6bf 100644 --- a/src/main/java/rest/CreateGetResource.java +++ b/src/main/java/rest/CreateGetResource.java @@ -2,7 +2,11 @@ package rest; import dto.ConnectDB; import dto.ScanObj; +import 
io.quarkiverse.kerberos.KerberosPrincipal; +import io.quarkus.security.Authenticated; +import io.quarkus.security.identity.SecurityIdentity; +import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; @@ -14,14 +18,15 @@ import java.util.Collections; import java.util.LinkedHashMap; import java.util.Set; + // import org.hibernate.EntityManager; // @Path("/api/v1/[osh-scan]") @Path("/scanGet") +@Authenticated public class CreateGetResource { - // @Inject - // EntityManager em; + CreateScanService createScanService; diff --git a/src/main/java/rest/CreateScanRequest.java b/src/main/java/rest/CreateScanRequest.java index 4373dd5..6d0a833 100644 --- a/src/main/java/rest/CreateScanRequest.java +++ b/src/main/java/rest/CreateScanRequest.java @@ -1,6 +1,7 @@ package rest; import dto.*; +import io.quarkus.security.Authenticated; import org.eclipse.microprofile.rest.client.inject.RestClient; import org.json.JSONObject; @@ -14,6 +15,7 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; +@Authenticated @Path("/scanRequest") public class CreateScanRequest { diff --git a/src/main/java/rest/CreateStartScan.java b/src/main/java/rest/CreateStartScan.java index 66748ce..4bade54 100644 --- a/src/main/java/rest/CreateStartScan.java +++ b/src/main/java/rest/CreateStartScan.java @@ -2,6 +2,7 @@ package rest; import dto.ConnectDB; import dto.ScanObj; +import io.quarkus.security.Authenticated; import org.eclipse.microprofile.rest.client.inject.RestClient; import javax.ws.rs.PUT; @@ -13,6 +14,7 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; +@Authenticated @Path("/startScan") public class CreateStartScan { diff --git a/src/main/java/rest/UsersResource.java b/src/main/java/rest/UsersResource.java new file mode 100644 index 0000000..f68ce18 --- /dev/null +++ b/src/main/java/rest/UsersResource.java @@ -0,0 +1,36 @@ +package rest; + +import dto.ConnectDB; +import dto.ScanObj; +import io.quarkiverse.kerberos.KerberosPrincipal; +import io.quarkus.security.Authenticated; +import io.quarkus.security.identity.SecurityIdentity; + +import javax.inject.Inject; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Set; +import javax.ws.rs.Produces; + +@Path("/testKerberos") +@Authenticated +public class UsersResource { + @Inject + SecurityIdentity identity; + @Inject + KerberosPrincipal kerberosPrincipal; + + @GET + @Path("/me") + @Produces("text/plain") + public String me() { + return identity.getPrincipal().getName(); + } +} \ No newline at end of file diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 6933166..d698fc5 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -6,5 +6,7 @@ # quarkus.hibernate-orm.database.generation=drop-and-create -quarkus.kerberos.keytab-path= "file:///tmp/TASK1559577-openscanhub-wrapper.keytab" +quarkus.kerberos.keytab-path= HTTP_osh-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com@IPA.REDHAT.COM.keytab +quarkus.kerberos.service-principal-name= HTTP/osh-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com@IPA.REDHAT.COM + From c61e6fb0f639027321959fac8bc33611bdaa0d7b Mon Sep 17 00:00:00 2001 From: Leonid Bossis Date: Fri, 9 Jun 2023 12:21:37 
-0400 Subject: [PATCH 14/26] checkpoint #1 --- src/main/java/dto/BrewObjPayload.java | 23 +++---- src/main/java/dto/ConnectDB.java | 19 ++---- src/main/java/dto/GitObjPayload.java | 17 ++--- src/main/java/dto/PncObjPayload.java | 13 ++-- src/main/java/dto/ScanObj.java | 3 +- src/main/java/dto/ScanObjPayload.java | 16 +++-- src/main/java/rest/CreateGetResource.java | 10 +-- src/main/java/rest/CreateScanRequest.java | 22 ++++--- src/main/java/rest/CreateScanResource.java | 15 +++-- src/main/java/rest/CreateStartScan.java | 18 ++++-- src/main/java/rest/RemoveScan.java | 66 +++++-------------- src/test/java/dto/TestPayload.java | 75 ++++++++++++++++++++++ 12 files changed, 178 insertions(+), 119 deletions(-) create mode 100644 src/test/java/dto/TestPayload.java diff --git a/src/main/java/dto/BrewObjPayload.java b/src/main/java/dto/BrewObjPayload.java index 2252837..57c2ad9 100644 --- a/src/main/java/dto/BrewObjPayload.java +++ b/src/main/java/dto/BrewObjPayload.java @@ -1,19 +1,20 @@ package dto; -import java.net.URISyntaxException; +import org.json.JSONException; import org.json.JSONObject; -import static constants.HttpHeaders.AUTHORIZATION_STRING; - public class BrewObjPayload { - public static BrewObj constructScanPayload(JSONObject brewObj) throws URISyntaxException { + + public static BrewObj constructScanPayload(JSONObject jsonObj) throws JSONException { return new BrewObj( - brewObj.getString("buildSystemType"), - brewObj.getString("brewId"), - brewObj.getString("brewNVR"), - brewObj.getString("pncId"), - brewObj.getString("artifactType"), - brewObj.getString("fileName"), - brewObj.getBoolean("builtfromSource")); + jsonObj.getString("buildSystemType"), + jsonObj.getString("brewId"), + jsonObj.getString("brewNVR"), + jsonObj.getString("pncId"), + jsonObj.getString("artifactType"), + jsonObj.getString("fileName"), + jsonObj.getBoolean("builtfromSource")); } + + private BrewObjPayload() {} } diff --git a/src/main/java/dto/ConnectDB.java b/src/main/java/dto/ConnectDB.java index db73c09..ff7f535 100644 --- a/src/main/java/dto/ConnectDB.java +++ b/src/main/java/dto/ConnectDB.java @@ -1,5 +1,7 @@ package dto; +import org.json.JSONException; + import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; @@ -9,24 +11,15 @@ import static constants.PSGQL.password; import static constants.PSGQL.url; public class ConnectDB { - // private final String url = "jdbc:postgresql://localhost:5432/scandb"; - // private final String user = "postgres"; - // private final String password = "password"; - /** - * Connect to the PostgreSQL database - * - * @return a Connection object - */ - public Connection connect() { - Connection conn = null; + public Connection connect() throws JSONException { try { - conn = DriverManager.getConnection(url, user, password); + Connection conn = DriverManager.getConnection(url, user, password); System.out.println("Connected to the PostgreSQL server successfully."); + return conn; } catch (SQLException e) { System.out.println(e.getMessage()); } - - return conn; + return null; } } diff --git a/src/main/java/dto/GitObjPayload.java b/src/main/java/dto/GitObjPayload.java index 9bc5ffb..ad8bc5b 100644 --- a/src/main/java/dto/GitObjPayload.java +++ b/src/main/java/dto/GitObjPayload.java @@ -1,16 +1,17 @@ package dto; -import java.net.URISyntaxException; +import org.json.JSONException; import org.json.JSONObject; -import static constants.HttpHeaders.AUTHORIZATION_STRING; - public class GitObjPayload { - public static GitObj constructScanPayload(JSONObject 
gitObj) throws URISyntaxException { + + public static GitObj constructScanPayload(JSONObject jsonObj) throws JSONException { return new GitObj( - gitObj.getString("buildSystemType"), - gitObj.getString("repository"), - gitObj.getString("reference"), - gitObj.getString("commitId")); + jsonObj.getString("buildSystemType"), + jsonObj.getString("repository"), + jsonObj.getString("reference"), + jsonObj.getString("commitId")); } + + private GitObjPayload() {} } diff --git a/src/main/java/dto/PncObjPayload.java b/src/main/java/dto/PncObjPayload.java index e8b106c..ad43edd 100644 --- a/src/main/java/dto/PncObjPayload.java +++ b/src/main/java/dto/PncObjPayload.java @@ -1,14 +1,15 @@ package dto; -import java.net.URISyntaxException; +import org.json.JSONException; import org.json.JSONObject; -import static constants.HttpHeaders.AUTHORIZATION_STRING; - public class PncObjPayload { - public static PncObj constructScanPayload(JSONObject pncObj) throws URISyntaxException { + + public static PncObj constructScanPayload(JSONObject jsonObj) throws JSONException { return new PncObj( - pncObj.getString("buildSystemType"), - pncObj.getString("buildId")); + jsonObj.getString("buildSystemType"), + jsonObj.getString("buildId")); } + + private PncObjPayload() {} } diff --git a/src/main/java/dto/ScanObj.java b/src/main/java/dto/ScanObj.java index 8c04963..54fcc69 100644 --- a/src/main/java/dto/ScanObj.java +++ b/src/main/java/dto/ScanObj.java @@ -6,9 +6,10 @@ import lombok.Getter; import lombok.ToString; import lombok.extern.jackson.Jacksonized; +import java.io.Serializable; + //still need to fix all the scan objects to be significantly less poorly written //TODO add interface for the scan objects (is probably the cleanest solution) -import java.io.Serializable; @ToString @Getter diff --git a/src/main/java/dto/ScanObjPayload.java b/src/main/java/dto/ScanObjPayload.java index b9a8be1..c4e49e4 100644 --- a/src/main/java/dto/ScanObjPayload.java +++ b/src/main/java/dto/ScanObjPayload.java @@ -1,15 +1,17 @@ package dto; -import java.net.URISyntaxException; +import org.json.JSONException; import org.json.JSONObject; public class ScanObjPayload { - public static ScanObj constructScanPayload(JSONObject scanObj) throws URISyntaxException { + public static ScanObj constructScanPayload(JSONObject jsonObj) throws JSONException { return new ScanObj( - scanObj.getString("scanID"), - scanObj.getString("offeringId"), - scanObj.getString("eventID"), - scanObj.getString("isManagedService"), - scanObj.getString("componentList")); + jsonObj.getString("scanID"), + jsonObj.getString("offeringId"), + jsonObj.getString("eventID"), + jsonObj.getString("isManagedService"), + jsonObj.getString("componentList")); } + + private ScanObjPayload() {} } diff --git a/src/main/java/rest/CreateGetResource.java b/src/main/java/rest/CreateGetResource.java index 6fd3858..8f937db 100644 --- a/src/main/java/rest/CreateGetResource.java +++ b/src/main/java/rest/CreateGetResource.java @@ -3,14 +3,17 @@ package rest; import java.util.Collections; import java.util.LinkedHashMap; import java.util.Set; + import dto.ScanObj; import dto.ConnectDB; + import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; -import java.sql.*; - -// import org.hibernate.EntityManager; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; // @Path("/api/v1/[osh-scan]") @Path("/scanGet") @@ -22,7 +25,6 @@ public class CreateGetResource { private Set Scans = 
Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<>())); - public CreateGetResource() { } diff --git a/src/main/java/rest/CreateScanRequest.java b/src/main/java/rest/CreateScanRequest.java index bbf0d11..d59c476 100644 --- a/src/main/java/rest/CreateScanRequest.java +++ b/src/main/java/rest/CreateScanRequest.java @@ -1,13 +1,5 @@ package rest; -import org.eclipse.microprofile.rest.client.inject.RestClient; -import javax.validation.Valid; -import javax.ws.rs.Consumes; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import java.net.URISyntaxException; -import java.sql.*; -import org.json.JSONObject; import dto.BrewObj; import dto.ConnectDB; import dto.BrewObjPayload; @@ -16,6 +8,18 @@ import dto.GitObjPayload; import dto.PncObj; import dto.PncObjPayload; +import org.eclipse.microprofile.rest.client.inject.RestClient; +import org.json.JSONObject; + +import javax.validation.Valid; +import javax.ws.rs.Consumes; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import java.net.URISyntaxException; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; + @Path("/scanRequest") public class CreateScanRequest { @@ -27,7 +31,7 @@ public class CreateScanRequest { @Path("/brew") @Consumes({ "application/json" }) // in theory should take List to clean it up - public BrewObj invokeBrewScanAnalyze(@Valid String scanInvocation) throws URISyntaxException { + public BrewObj invokeBrewScanAnalyze(@Valid String scanInvocation) throws URISyntaxException { JSONObject jsonData = new JSONObject(scanInvocation); BrewObj brewObj = BrewObjPayload.constructScanPayload(jsonData); ConnectDB connectDB = new ConnectDB(); diff --git a/src/main/java/rest/CreateScanResource.java b/src/main/java/rest/CreateScanResource.java index df83367..916786e 100644 --- a/src/main/java/rest/CreateScanResource.java +++ b/src/main/java/rest/CreateScanResource.java @@ -1,17 +1,20 @@ package rest; -import dto.*; +import dto.ConnectDB; +import dto.ScanObjPayload; +import dto.ScanObj; + import org.eclipse.microprofile.rest.client.inject.RestClient; +import org.json.JSONObject; + import javax.validation.Valid; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.Path; import java.net.URISyntaxException; -import java.sql.*; -import org.json.JSONObject; -import dto.ScanObj; - -import static constants.HttpHeaders.AUTHORIZATION_STRING; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; @Path("/") public class CreateScanResource { diff --git a/src/main/java/rest/CreateStartScan.java b/src/main/java/rest/CreateStartScan.java index 85fd1d2..a689716 100644 --- a/src/main/java/rest/CreateStartScan.java +++ b/src/main/java/rest/CreateStartScan.java @@ -1,14 +1,18 @@ package rest; import dto.ConnectDB; -import org.eclipse.microprofile.rest.client.inject.RestClient; import dto.ScanObj; + +import org.eclipse.microprofile.rest.client.inject.RestClient; + import javax.ws.rs.Path; import javax.ws.rs.PUT; import javax.ws.rs.PathParam; import java.net.URISyntaxException; -import java.sql.*; - +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; @Path("/startScan") public class CreateStartScan { @@ -23,8 +27,12 @@ public class CreateStartScan { ConnectDB connectDB = new ConnectDB(); try (Connection conn = connectDB.connect()) { finalScan = selectDataForArchiving(conn, scanId); - ArchiveSelectedScans(conn, finalScan); - PostArchivingCleanup(conn, scanId); + if 
(finalScan != null) { + ArchiveSelectedScans(conn, finalScan); + PostArchivingCleanup(conn, scanId); + } else { + System.out.println("No data match found for scan ID=" + scanId); + } } catch (SQLException e) { System.out.println(e.getMessage()); } diff --git a/src/main/java/rest/RemoveScan.java b/src/main/java/rest/RemoveScan.java index efc0d19..17e6cc6 100644 --- a/src/main/java/rest/RemoveScan.java +++ b/src/main/java/rest/RemoveScan.java @@ -1,70 +1,38 @@ package rest; +import dto.ConnectDB; + import org.eclipse.microprofile.rest.client.inject.RestClient; -import dto.ScanObj; -import javax.inject.Inject; -import javax.validation.Valid; -import javax.ws.rs.Consumes; -import javax.ws.rs.POST; import javax.ws.rs.Path; -import javax.ws.rs.PUT; import javax.ws.rs.DELETE; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.UUID; -import org.json.JSONObject; -import org.json.JSONArray; -import dto.ScanObj; -import dto.ConnectDB; -import dto.ScanObjPayload; - import javax.ws.rs.PathParam; - -import static constants.HttpHeaders.AUTHORIZATION_STRING; import java.sql.Connection; -import java.sql.DriverManager; +import java.sql.PreparedStatement; import java.sql.SQLException; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.Statement; - @Path("/deleteScan") public class RemoveScan { - // @Inject @RestClient CreateScanService createScanService; - // ScanObjPayload scanObjPayload; @DELETE @Path("/{scanId}") - public boolean invokeScanAnalyze(@PathParam("scanId") String scanId) throws URISyntaxException { + public boolean invokeScanAnalyze(@PathParam("scanId") String scanId) { + Boolean rc = false; + //send task to the actual interface here using the resultset returned (should multiple scanids be allowed): + //once the task is complete AND we have confirmation that the scan is done run the following sql + String qry = "DELETE FROM scans WHERE scanid=?"; ConnectDB connectDB = new ConnectDB(); - Connection conn = connectDB.connect(); - //this is ugly needs to berewritten - Statement stmt = null; - ScanObj finalScan = null; - //fix this - Boolean success = false; - String sql = "DELETE FROM scans WHERE scanid=" + scanId; - //need to add figure out an archieve system and wether its nessacery (archieve value??) 
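A note on the rewritten delete handler that follows: it binds the scanId path parameter with setString, while schema/schema.sql declares scanid as SERIAL, so the PostgreSQL JDBC driver may reject a varchar bind against that integer column. Below is a minimal sketch of a numeric bind under that assumption; the DeleteScanSketch/deleteScan names are illustrative only and not part of the patch, while ConnectDB and the scans table come from this repository.

// --- illustrative sketch, not part of the patch ---
package rest;

import dto.ConnectDB;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

class DeleteScanSketch {
    // Numeric bind for the SERIAL scanid column; assumes ConnectDB.connect() returned a live connection.
    static boolean deleteScan(String scanId) {
        String qry = "DELETE FROM scans WHERE scanid=?";
        ConnectDB connectDB = new ConnectDB();
        try (Connection conn = connectDB.connect();
             PreparedStatement pstmt = conn.prepareStatement(qry)) {
            pstmt.setInt(1, Integer.parseInt(scanId)); // avoid a varchar-vs-integer mismatch on the SERIAL key
            return pstmt.executeUpdate() > 0;          // true only if a row was actually removed
        } catch (SQLException | NumberFormatException e) {
            System.out.println(e.getMessage());
            return false;
        }
    }
}
// --- end sketch ---

Checking the update count also distinguishes "no matching scan" from a successful delete, which an unconditional success flag cannot.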
- try{ - stmt = conn.createStatement(); - //TODO add proper checks - stmt.executeUpdate(sql); - //send task to the actual interface here using the resultset returned (should multiple scanids be allowed): - //once the task is complete AND we have confirmation that the scan is done run the following sql - conn.close(); - } catch (SQLException e){ - System.out.println(e); - } - success = true; - return success; + try(Connection conn = connectDB.connect(); + PreparedStatement pstmt = conn.prepareStatement(qry)) { + pstmt.setString(1, scanId); + pstmt.executeUpdate(); + rc = true; + } catch (SQLException e) { + System.out.println(e.getMessage()); + } + return rc; } } diff --git a/src/test/java/dto/TestPayload.java b/src/test/java/dto/TestPayload.java new file mode 100644 index 0000000..aab36ee --- /dev/null +++ b/src/test/java/dto/TestPayload.java @@ -0,0 +1,75 @@ +package dto; + +import org.json.JSONObject; +import org.junit.jupiter.api.Test; + +class TestPayload { + + @Test + void TestBrew() { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("buildSystemType", "brew"); + jsonObject.put("brewId", "1"); + jsonObject.put("brewNVR", "1.1.0"); + jsonObject.put("pncId", "153"); + jsonObject.put("artifactType", "arti1"); + jsonObject.put("fileName", "myfile1"); + jsonObject.put("builtfromSource", true); + + BrewObj brewObj1 = BrewObjPayload.constructScanPayload(jsonObject); + BrewObj brewObj2 = new BrewObj( + jsonObject.getString("buildSystemType"), + jsonObject.getString("brewId"), + jsonObject.getString("brewNVR"), + jsonObject.getString("pncId"), + jsonObject.getString("artifactType"), + jsonObject.getString("fileName"), + jsonObject.getBoolean("builtfromSource")); + System.out.println("BrewObj1: " + brewObj1.toString()); + System.out.println("BrewObj2: " + brewObj2.toString()); + assert(brewObj1.getBuildSystemType().equals(brewObj2.getBuildSystemType())); + assert(brewObj1.getBrewId().equals(brewObj2.getBrewId())); + assert(brewObj1.getBrewNvr().equals(brewObj2.getBrewNvr())); + assert(brewObj1.getPncId().equals(brewObj2.getPncId())); + assert(brewObj1.getArtifactType().equals(brewObj2.getArtifactType())); + assert(brewObj1.getFileName().equals(brewObj2.getFileName())); + assert(brewObj1.getBuiltFromSource() == brewObj2.getBuiltFromSource()); + } + + @Test + void TestGit() { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("buildSystemType", "git"); + jsonObject.put("repository", "repo"); + jsonObject.put("reference", "ref"); + jsonObject.put("commitId", "comid"); + + GitObj gitObj1 = GitObjPayload.constructScanPayload(jsonObject); + GitObj gitObj2 = new GitObj( + jsonObject.getString("buildSystemType"), + jsonObject.getString("repository"), + jsonObject.getString("reference"), + jsonObject.getString("commitId")); + System.out.println("GitObj1: " + gitObj1.toString()); + System.out.println("GitObj2: " + gitObj2.toString()); + assert(gitObj1.getBuildSystemType().equals(gitObj2.getBuildSystemType())); + assert(gitObj1.getRepository().equals(gitObj2.getRepository())); + assert(gitObj1.getReference().equals(gitObj2.getReference())); + assert(gitObj1.getCommitId().equals(gitObj2.getCommitId())); + } + + @Test + void TestPnc() { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("buildSystemType", "pnc"); + jsonObject.put("buildId", "153"); + + PncObj pncObj1 = PncObjPayload.constructScanPayload(jsonObject); + PncObj pncObj2 = new PncObj(jsonObject.getString("buildSystemType"), jsonObject.getString("buildId")); + System.out.println("PncObj1: " + pncObj1.toString()); + 
System.out.println("PncObj2: " + pncObj2.toString()); + assert(pncObj1.getBuildSystemType().equals(pncObj2.getBuildSystemType())); + assert(pncObj1.getBuildId().equals(pncObj2.getBuildId())); + } + +} From 8975fff63dafffe5d8375d8c7b2a778b2c0983b7 Mon Sep 17 00:00:00 2001 From: Leonid Bossis Date: Fri, 9 Jun 2023 15:53:12 -0400 Subject: [PATCH 15/26] change db table field names from mixed naming convention to pythonic convention xxx_yyy_zzz and stop using CamelCase --- .gitignore | 12 + schema/populate.sql | 252 ++++++++++----------- schema/schema.sql | 60 ++--- src/main/java/dto/BrewObj.java | 2 +- src/main/java/dto/BrewObjPayload.java | 14 +- src/main/java/dto/ConnectDB.java | 4 +- src/main/java/dto/GitObj.java | 2 +- src/main/java/dto/GitObjPayload.java | 4 +- src/main/java/dto/PncObj.java | 2 +- src/main/java/dto/PncObjPayload.java | 4 +- src/main/java/dto/ScanObj.java | 12 +- src/main/java/dto/ScanObjPayload.java | 10 +- src/main/java/rest/CreateGetResource.java | 12 +- src/main/java/rest/CreateScanRequest.java | 1 - src/main/java/rest/CreateScanResource.java | 14 +- src/main/java/rest/CreateStartScan.java | 45 ++-- src/main/java/rest/RemoveScan.java | 4 +- src/test/java/dto/TestPayload.java | 69 ++++-- 18 files changed, 281 insertions(+), 242 deletions(-) create mode 100644 .gitignore diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c65012f --- /dev/null +++ b/.gitignore @@ -0,0 +1,12 @@ +.dcignore +.idea +*.iml + +dev/ + +# Maven +target/ +pom.xml.tag +pom.xml.releaseBackup +pom.xml.versionsBackup +release.properties diff --git a/schema/populate.sql b/schema/populate.sql index ee69404..d2f5584 100644 --- a/schema/populate.sql +++ b/schema/populate.sql @@ -1,126 +1,126 @@ -INSERT INTO osh.offerings(offeringId,description) VALUES ('ansible-automation-platform','Ansible Automation Platform (AAP)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('advisor','Insights Advisor'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('ansible-on-aws','Ansible on AWS'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('ansible-on-azure','Ansible on Azure'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('ansible-on-gcp','Ansible on GCP'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('ansible-wisdom-service','Ansible Wisdom Service'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('cert-manager','cert-manager Operator for Red Hat OpenShift'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('compliance','Insights Compliance'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('connected-customer-experience','Connected Customer Experience (CCX)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('cost-management','Cost Management'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('custom-metric-autoscaler','OpenShift Custom Metrics Autoscaler'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('developer-sandbox-for-red-hat-openshift','Developer Sandbox for Red Hat OpenShift'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('dotnet','.NET'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('drift','Insights Drift'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('eclipse-vertx','Red Hat build of Eclipse Vert.x'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('edge-management','Edge Management'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('eventing','Insights Eventing'); -INSERT INTO 
osh.offerings(offeringId,description) VALUES ('fastdatapath','RHEL Fast Datapath'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('host-management-services','Host Management Services'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('hosted-control-planes','Hosted Control Planes (Hypershift)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('hybrid-application-console','Hybrid Application Console (HAC)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('insights-essential','Insights Essentials'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('kernel-module-management','Kernel Module Management'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('logging-subsystem-for-red-hat-openshift','Logging Subsystem for Red Hat OpenShift'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('lvms-operator','LVMS Operator'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('malware-detection','Insights Malware Detection'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('mgmt-platform','Management Platform'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('migration-toolkit-for-applications','Migration Toolkit for Applications (MTA)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('migration-toolkit-for-containers','Migration Toolkit for Containers (MTC)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('migration-toolkit-for-runtimes','Migration Toolkit for Runtimes (MTR)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('migration-toolkit-for-virtualization','Migration Toolkit for Virtualization (MTV)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('network-observability-operator','Network Observability Operator'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('node-healthcheck-operator','Node HealthCheck Operator'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('node-maintenance-operator','Node Maintenance Operator'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('nvidia-gpu-add-on','NVIDIA GPU Add-On'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('oadp','OpenShift API for Data Protection'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-container-platform','Openshift Container Platform (OCP)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-container-storage','OpenShift Container Storage (OCS)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-data-foundation-managed-service','Red Hat OpenShift Data Foundation Managed Service'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-dedicated','OpenShift Dedicated (OSD/ROSA)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-developer-tools-and-services-helm','OpenShift Developer Tools and Services (Helm)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-developer-tools-and-services-jenkins','OpenShift Developer Tools and Services (Jenkins)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-distributed-tracing','OpenShift Distributed Tracing'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-on-azure','Openshift on Azure (ARO)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-run-once-duration-override-operator','OpenShift Run Once Duration Override Operator'); -INSERT INTO osh.offerings(offeringId,description) VALUES 
('openshift-sandboxed-containers','Openshift Sandboxed Containers'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-secondary-scheduler-operator','OpenShift Secondary Scheduler Operator'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-servicemesh','OpenShift Service Mesh'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-virtualization','OpenShift Virtualization (CNV)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-web-terminal-operator','OpenShift Web Terminal Operator'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('openshift-winc','Windows Container Support for OpenShift'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('patch','Insights Patch'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('product-discovery','Product Discovery'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-3scale-api-management-platform','Red Hat 3scale API Management Platform'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-advanced-cluster-management','Red Hat Advanced Cluster Management (RHACM)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-amq-broker','Red Hat AMQ Broker'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-amq-clients','Red Hat AMQ Clients'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-amq-interconnect','Red Hat AMQ Interconnect'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-amq-online','Red Hat AMQ Online'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-amq-streams','Red Hat AMQ Streams'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-build-apicurio-registry','Red Hat build of Apicurio Registry (formerly known as Integration Service Registry)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-build-quarkus','Red Hat Build of Quarkus'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-camel-extensions-quarkus','Red Hat Camel Extensions for Quarkus'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-camel-k','Red Hat Camel K'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-camel-spring-boot','Red Hat Camel for Spring Boot'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-ceph-storage','Red Hat Ceph Storage'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-certificate-system','Red Hat Certificate System (RHCS)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-certification-program','Red Hat Certification Program (rhcertification)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-code-quarkus','Red Hat Code Quarkus'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-core-os','Red Hat CoreOS'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-data-grid','Red Hat Data Grid'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-debezium','Red Hat Debezium'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-decision-manager','Red Hat Decision Manager'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-developer-hub','Red Hat Developer Hub'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-developer-toolset','Red Hat Developer Toolset (DTS)'); -INSERT INTO osh.offerings(offeringId,description) VALUES 
('red-hat-devtools-compilers','Red Hat Developer Tools (DevTools Compilers)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-directory-server','Red Hat Directory Server (RHDS)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-10','Red Hat Enterprise Linux (RHEL) 10'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-6','Red Hat Enterprise Linux (RHEL) 6'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-7','Red Hat Enterprise Linux (RHEL) 7'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-8','Red Hat Enterprise Linux (RHEL) 8'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-enterprise-linux-9','Red Hat Enterprise Linux (RHEL) 9'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-fuse','Red Hat Fuse'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-gluster-storage','Red Hat Gluster Storage'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-in-vehicle-os','Red Hat In-Vehicle Operating System (RHIVOS)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-jboss-core-services','Red Hat JBoss Core Services'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-jboss-eap','Red Hat JBoss Enterprise Application Platform (EAP)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-jboss-web-server','Red Hat JBoss Web Server'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-observability-service','Red Hat Observability Service'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-open-database-access','Red Hat OpenShift Database Access'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-open-shift-data-science','Red Hat OpenShift Data Science (RHODS)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openjdk','Red Hat OpenJDK'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-api-management','Red Hat OpenShift API Management'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-builds-v2','Red Hat OpenShift Builds V2'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-connectors','Red Hat OpenShift Connectors (RHOC)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-control-plane-service','Red Hat OpenShift Control Plane Service'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-data-foundation','Red Hat OpenShift Data Foundation'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-dev-spaces','Red Hat OpenShift Dev Spaces'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-gitops','Red Hat OpenShift GitOps'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-local','Red Hat OpenShift Local'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-pipelines','Red Hat OpenShift Pipelines'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-serverless','Red Hat OpenShift Serverless'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-service-registry','Red Hat OpenShift Service Registry'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openshift-streams-apache-kafka','Red Hat OpenShift Streams for Apache 
Kafka (RHOSAK)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-openstack-platform','Red Hat OpenStack Platform (RHOSP)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-optaplanner','Red Hat Optaplanner'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-plug-ins-for-backstage','Red Hat Plug-ins for Backstage'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-process-automation-manager','Red Hat Process Automation Manager'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-quarkus-registry','Red Hat Quarkus Registry'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-quay','Red Hat Quay'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-satellite','Red Hat Satellite'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-service-interconnect','Red Hat Service Interconnect (formerly known as Application Interconnect)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-single-sign-on','Red Hat Single Sign-On (RHSSO)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-software-collections','Red Hat Software Collections'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-support-for-spring-boot','Red Hat support for Spring Boot'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-trusted-application-pipeline','Red Hat Trusted Application Pipeline (RHTAP)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-update-infrastructure','Red Hat Update Infrastructure (RHUI)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('red-hat-virtualization','Red Hat Virtualization'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('resource-optimization','Insights Resource Optimization (ROS)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('rh-vulnerability-for-ocp','Insights Vulnerability for OCP'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('rhacs','Red Hat Advanced Cluster Security for Kubernetes (RHACS)'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('self-node-remediation','Self Node Remediation'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('subscription-central','Subscription Central'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('subscription-watch','Subscription Watch'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('telco-sw-components','Telco SW Components'); -INSERT INTO osh.offerings(offeringId,description) VALUES ('vulnerability','Vulnerability'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-automation-platform','Ansible Automation Platform (AAP)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('advisor','Insights Advisor'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-on-aws','Ansible on AWS'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-on-azure','Ansible on Azure'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-on-gcp','Ansible on GCP'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('ansible-wisdom-service','Ansible Wisdom Service'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('cert-manager','cert-manager Operator for Red Hat OpenShift'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('compliance','Insights Compliance'); +INSERT INTO osh.offerings(offering_id,description) VALUES 
('connected-customer-experience','Connected Customer Experience (CCX)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('cost-management','Cost Management'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('custom-metric-autoscaler','OpenShift Custom Metrics Autoscaler'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('developer-sandbox-for-red-hat-openshift','Developer Sandbox for Red Hat OpenShift'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('dotnet','.NET'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('drift','Insights Drift'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('eclipse-vertx','Red Hat build of Eclipse Vert.x'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('edge-management','Edge Management'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('eventing','Insights Eventing'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('fastdatapath','RHEL Fast Datapath'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('host-management-services','Host Management Services'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('hosted-control-planes','Hosted Control Planes (Hypershift)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('hybrid-application-console','Hybrid Application Console (HAC)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('insights-essential','Insights Essentials'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('kernel-module-management','Kernel Module Management'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('logging-subsystem-for-red-hat-openshift','Logging Subsystem for Red Hat OpenShift'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('lvms-operator','LVMS Operator'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('malware-detection','Insights Malware Detection'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('mgmt-platform','Management Platform'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-applications','Migration Toolkit for Applications (MTA)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-containers','Migration Toolkit for Containers (MTC)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-runtimes','Migration Toolkit for Runtimes (MTR)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('migration-toolkit-for-virtualization','Migration Toolkit for Virtualization (MTV)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('network-observability-operator','Network Observability Operator'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('node-healthcheck-operator','Node HealthCheck Operator'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('node-maintenance-operator','Node Maintenance Operator'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('nvidia-gpu-add-on','NVIDIA GPU Add-On'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('oadp','OpenShift API for Data Protection'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-container-platform','Openshift Container Platform (OCP)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-container-storage','OpenShift Container Storage (OCS)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-data-foundation-managed-service','Red Hat 
OpenShift Data Foundation Managed Service'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-dedicated','OpenShift Dedicated (OSD/ROSA)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-developer-tools-and-services-helm','OpenShift Developer Tools and Services (Helm)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-developer-tools-and-services-jenkins','OpenShift Developer Tools and Services (Jenkins)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-distributed-tracing','OpenShift Distributed Tracing'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-on-azure','Openshift on Azure (ARO)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-run-once-duration-override-operator','OpenShift Run Once Duration Override Operator'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-sandboxed-containers','Openshift Sandboxed Containers'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-secondary-scheduler-operator','OpenShift Secondary Scheduler Operator'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-servicemesh','OpenShift Service Mesh'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-virtualization','OpenShift Virtualization (CNV)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-web-terminal-operator','OpenShift Web Terminal Operator'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('openshift-winc','Windows Container Support for OpenShift'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('patch','Insights Patch'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('product-discovery','Product Discovery'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-3scale-api-management-platform','Red Hat 3scale API Management Platform'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-advanced-cluster-management','Red Hat Advanced Cluster Management (RHACM)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-broker','Red Hat AMQ Broker'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-clients','Red Hat AMQ Clients'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-interconnect','Red Hat AMQ Interconnect'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-online','Red Hat AMQ Online'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-amq-streams','Red Hat AMQ Streams'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-build-apicurio-registry','Red Hat build of Apicurio Registry (formerly known as Integration Service Registry)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-build-quarkus','Red Hat Build of Quarkus'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-camel-extensions-quarkus','Red Hat Camel Extensions for Quarkus'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-camel-k','Red Hat Camel K'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-camel-spring-boot','Red Hat Camel for Spring Boot'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-ceph-storage','Red Hat Ceph Storage'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-certificate-system','Red Hat Certificate System (RHCS)'); +INSERT INTO 
osh.offerings(offering_id,description) VALUES ('red-hat-certification-program','Red Hat Certification Program (rhcertification)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-code-quarkus','Red Hat Code Quarkus'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-core-os','Red Hat CoreOS'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-data-grid','Red Hat Data Grid'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-debezium','Red Hat Debezium'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-decision-manager','Red Hat Decision Manager'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-developer-hub','Red Hat Developer Hub'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-developer-toolset','Red Hat Developer Toolset (DTS)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-devtools-compilers','Red Hat Developer Tools (DevTools Compilers)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-directory-server','Red Hat Directory Server (RHDS)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-10','Red Hat Enterprise Linux (RHEL) 10'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-6','Red Hat Enterprise Linux (RHEL) 6'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-7','Red Hat Enterprise Linux (RHEL) 7'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-8','Red Hat Enterprise Linux (RHEL) 8'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-enterprise-linux-9','Red Hat Enterprise Linux (RHEL) 9'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-fuse','Red Hat Fuse'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-gluster-storage','Red Hat Gluster Storage'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-in-vehicle-os','Red Hat In-Vehicle Operating System (RHIVOS)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-jboss-core-services','Red Hat JBoss Core Services'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-jboss-eap','Red Hat JBoss Enterprise Application Platform (EAP)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-jboss-web-server','Red Hat JBoss Web Server'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-observability-service','Red Hat Observability Service'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-open-database-access','Red Hat OpenShift Database Access'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-open-shift-data-science','Red Hat OpenShift Data Science (RHODS)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openjdk','Red Hat OpenJDK'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-api-management','Red Hat OpenShift API Management'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-builds-v2','Red Hat OpenShift Builds V2'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-connectors','Red Hat OpenShift Connectors (RHOC)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-control-plane-service','Red Hat OpenShift Control Plane Service'); +INSERT INTO 
osh.offerings(offering_id,description) VALUES ('red-hat-openshift-data-foundation','Red Hat OpenShift Data Foundation'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-dev-spaces','Red Hat OpenShift Dev Spaces'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-gitops','Red Hat OpenShift GitOps'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-local','Red Hat OpenShift Local'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-pipelines','Red Hat OpenShift Pipelines'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-serverless','Red Hat OpenShift Serverless'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-service-registry','Red Hat OpenShift Service Registry'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openshift-streams-apache-kafka','Red Hat OpenShift Streams for Apache Kafka (RHOSAK)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-openstack-platform','Red Hat OpenStack Platform (RHOSP)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-optaplanner','Red Hat Optaplanner'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-plug-ins-for-backstage','Red Hat Plug-ins for Backstage'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-process-automation-manager','Red Hat Process Automation Manager'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-quarkus-registry','Red Hat Quarkus Registry'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-quay','Red Hat Quay'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-satellite','Red Hat Satellite'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-service-interconnect','Red Hat Service Interconnect (formerly known as Application Interconnect)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-single-sign-on','Red Hat Single Sign-On (RHSSO)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-software-collections','Red Hat Software Collections'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-support-for-spring-boot','Red Hat support for Spring Boot'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-trusted-application-pipeline','Red Hat Trusted Application Pipeline (RHTAP)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-update-infrastructure','Red Hat Update Infrastructure (RHUI)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('red-hat-virtualization','Red Hat Virtualization'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('resource-optimization','Insights Resource Optimization (ROS)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('rh-vulnerability-for-ocp','Insights Vulnerability for OCP'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('rhacs','Red Hat Advanced Cluster Security for Kubernetes (RHACS)'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('self-node-remediation','Self Node Remediation'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('subscription-central','Subscription Central'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('subscription-watch','Subscription Watch'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('telco-sw-components','Telco SW 
Components'); +INSERT INTO osh.offerings(offering_id,description) VALUES ('vulnerability','Vulnerability'); diff --git a/schema/schema.sql b/schema/schema.sql index fe05d59..f817daa 100644 --- a/schema/schema.sql +++ b/schema/schema.sql @@ -3,59 +3,59 @@ CREATE SCHEMA osh; GRANT USAGE ON SCHEMA osh TO postgres; CREATE TABLE IF NOT EXISTS osh.offerings( - offeringId VARCHAR(100), + offering_id VARCHAR(100), description VARCHAR(200), PRIMARY KEY (offeringId) ); CREATE TABLE IF NOT EXISTS osh.results( - resultsId SERIAL, + results_id SERIAL, datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, state BOOLEAN, logs bytea, task_reference VARCHAR(50), - PRIMARY KEY (resultsId) + PRIMARY KEY (results_id) ); CREATE TABLE IF NOT EXISTS osh.scans( - scanID SERIAL, - offeringId VARCHAR(100), - eventID VARCHAR(100) NOT NULL, - isManagedService BOOLEAN NOT NULL, - componentList VARCHAR(100), + scan_id SERIAL, + offering_id VARCHAR(100), + event_id VARCHAR(100) NOT NULL, + is_managed_service BOOLEAN NOT NULL, + component_list VARCHAR(100), datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, owner VARCHAR(50) NOT NULL, results SERIAL, status VARCHAR (50) CONSTRAINT valid_status CHECK(status in ('PENDING', 'DELETED', 'COMPLETED', 'IN PROGRESS')), last_updated TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, - PRIMARY KEY(scanID), - FOREIGN KEY (offeringId) REFERENCES osh.offerings(offeringId), - FOREIGN KEY (results) REFERENCES osh.results(resultsId) + PRIMARY KEY(scan_id), + FOREIGN KEY (offering_id) REFERENCES osh.offerings(offering_id), + FOREIGN KEY (results) REFERENCES osh.results(results_id) ); CREATE TABLE IF NOT EXISTS osh.archive( - scanID SERIAL, - offeringId VARCHAR(100), - eventID VARCHAR(100) NOT NULL, - isManagedService BOOLEAN NOT NULL, - componentList VARCHAR(100), + scan_id SERIAL, + offering_id VARCHAR(100), + event_id VARCHAR(100) NOT NULL, + is_managed_service BOOLEAN NOT NULL, + component_list VARCHAR(100), datetime TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, owner VARCHAR(50) NOT NULL, results SERIAL, status VARCHAR (50) CONSTRAINT valid_status CHECK(status in ('PENDING', 'DELETED', 'COMPLETED', 'IN PROGRESS')), last_updated TIMESTAMP WITHOUT TIME ZONE DEFAULT (NOW() AT TIME ZONE 'utc') NOT NULL, - PRIMARY KEY(scanID), - FOREIGN KEY (offeringId) REFERENCES osh.offerings(offeringId), - FOREIGN KEY (results) REFERENCES osh.results(resultsId) + PRIMARY KEY(scan_id), + FOREIGN KEY (offering_id) REFERENCES osh.offerings(offering_id), + FOREIGN KEY (results) REFERENCES osh.results(results_id) ); CREATE TABLE IF NOT EXISTS osh.gitscans ( id SERIAL, - buildSystemType VARCHAR(80), + build_system_type VARCHAR(80), repository VARCHAR(150), reference VARCHAR(100), - commitId VARCHAR(100), + commit_id VARCHAR(100), -- SHA256 has a length of 256 bits, so 256 bits would represent 64 hex characters hashsum VARCHAR(64), PRIMARY KEY(id) @@ -63,19 +63,19 @@ CREATE TABLE IF NOT EXISTS osh.gitscans ( CREATE TABLE IF NOT EXISTS osh.pncscans( id SERIAL, - buildSystemType VARCHAR(80), - buildId VARCHAR(100), + build_system_type VARCHAR(80), + build_id VARCHAR(100), PRIMARY KEY(id) ); CREATE TABLE IF NOT EXISTS osh.brewscans( id SERIAL, - buildSystemType VARCHAR(80), - brewId VARCHAR(100), - brewNVR VARCHAR(100), - pncId VARCHAR(100), - artifactType VARCHAR(100), - fileName VARCHAR(100), - builtfromSource BOOLEAN, + build_system_type VARCHAR(80), + brew_id VARCHAR(100), + brew_nvr VARCHAR(100), + 
pnc_id VARCHAR(100), + artifact_type VARCHAR(100), + file_name VARCHAR(100), + built_from_source BOOLEAN, PRIMARY KEY(id) ); diff --git a/src/main/java/dto/BrewObj.java b/src/main/java/dto/BrewObj.java index 449b9c2..878993f 100644 --- a/src/main/java/dto/BrewObj.java +++ b/src/main/java/dto/BrewObj.java @@ -15,7 +15,7 @@ import java.io.Serializable; public class BrewObj implements Serializable { public static final String SQL = "INSERT INTO brewscans " + - "(buildSystemType, brewId, brewNVR, pncId, artifactType, fileName, builtfromSource)" + + "(build_system_type, brew_id, brew_nvr, pnc_id, artifact_type, file_name, built_from_source)" + "VALUES (? ? ? ? ? ? ?)"; private String buildSystemType; diff --git a/src/main/java/dto/BrewObjPayload.java b/src/main/java/dto/BrewObjPayload.java index 57c2ad9..07f41ff 100644 --- a/src/main/java/dto/BrewObjPayload.java +++ b/src/main/java/dto/BrewObjPayload.java @@ -7,13 +7,13 @@ public class BrewObjPayload { public static BrewObj constructScanPayload(JSONObject jsonObj) throws JSONException { return new BrewObj( - jsonObj.getString("buildSystemType"), - jsonObj.getString("brewId"), - jsonObj.getString("brewNVR"), - jsonObj.getString("pncId"), - jsonObj.getString("artifactType"), - jsonObj.getString("fileName"), - jsonObj.getBoolean("builtfromSource")); + jsonObj.getString("build_system_type"), + jsonObj.getString("brew_id"), + jsonObj.getString("brew_nvr"), + jsonObj.getString("pnc_id"), + jsonObj.getString("artifact_type"), + jsonObj.getString("file_name"), + jsonObj.getBoolean("built_from_source")); } private BrewObjPayload() {} diff --git a/src/main/java/dto/ConnectDB.java b/src/main/java/dto/ConnectDB.java index ff7f535..37f977d 100644 --- a/src/main/java/dto/ConnectDB.java +++ b/src/main/java/dto/ConnectDB.java @@ -10,12 +10,14 @@ import static constants.PSGQL.user; import static constants.PSGQL.password; import static constants.PSGQL.url; +// @TODO Replace hard-coded credentials; make use of our secure db connection practice + public class ConnectDB { public Connection connect() throws JSONException { try { Connection conn = DriverManager.getConnection(url, user, password); - System.out.println("Connected to the PostgreSQL server successfully."); + System.out.println("Connected to PostgreSQL server"); return conn; } catch (SQLException e) { System.out.println(e.getMessage()); diff --git a/src/main/java/dto/GitObj.java b/src/main/java/dto/GitObj.java index 68245ed..46c7ce1 100644 --- a/src/main/java/dto/GitObj.java +++ b/src/main/java/dto/GitObj.java @@ -16,7 +16,7 @@ import java.io.Serializable; public class GitObj implements Serializable { public static final String SQL = "INSERT INTO gitscans " + - "(buildSystemType, repository, reference, commitId)" + + "(build_system_type, repository, reference, commit_id)" + "VALUES (? ? ? 
?)"; private String buildSystemType; diff --git a/src/main/java/dto/GitObjPayload.java b/src/main/java/dto/GitObjPayload.java index ad8bc5b..eaabab1 100644 --- a/src/main/java/dto/GitObjPayload.java +++ b/src/main/java/dto/GitObjPayload.java @@ -7,10 +7,10 @@ public class GitObjPayload { public static GitObj constructScanPayload(JSONObject jsonObj) throws JSONException { return new GitObj( - jsonObj.getString("buildSystemType"), + jsonObj.getString("build_system_type"), jsonObj.getString("repository"), jsonObj.getString("reference"), - jsonObj.getString("commitId")); + jsonObj.getString("commit_id")); } private GitObjPayload() {} diff --git a/src/main/java/dto/PncObj.java b/src/main/java/dto/PncObj.java index 2633d93..a3a06ea 100644 --- a/src/main/java/dto/PncObj.java +++ b/src/main/java/dto/PncObj.java @@ -14,7 +14,7 @@ import java.io.Serializable; @Builder public class PncObj implements Serializable { - public static final String SQL = "INSERT INTO pncscans (buildSystemType, buildId) VALUES (? ?)"; + public static final String SQL = "INSERT INTO pncscans (build_system_type, build_id) VALUES (? ?)"; private String buildSystemType; private String buildId; diff --git a/src/main/java/dto/PncObjPayload.java b/src/main/java/dto/PncObjPayload.java index ad43edd..a8f313c 100644 --- a/src/main/java/dto/PncObjPayload.java +++ b/src/main/java/dto/PncObjPayload.java @@ -7,8 +7,8 @@ public class PncObjPayload { public static PncObj constructScanPayload(JSONObject jsonObj) throws JSONException { return new PncObj( - jsonObj.getString("buildSystemType"), - jsonObj.getString("buildId")); + jsonObj.getString("build_system_type"), + jsonObj.getString("build_id")); } private PncObjPayload() {} diff --git a/src/main/java/dto/ScanObj.java b/src/main/java/dto/ScanObj.java index 54fcc69..c7b33b8 100644 --- a/src/main/java/dto/ScanObj.java +++ b/src/main/java/dto/ScanObj.java @@ -19,12 +19,12 @@ import java.io.Serializable; public class ScanObj implements Serializable { public static final String SQL = "INSERT INTO scans " + - "(scanID, offeringId, eventID, isManagedService, componentlist) " + + "(scan_id, offering_id, event_id, is_managed_service, component_list) " + "VALUES (? ? ? ? 
?)"; - public String scanId; - public String productId; - public String eventId; - public String isManagedService; - public String componentList; + private String scanId; + private String productId; + private String eventId; + private String isManagedService; + private String componentList; } \ No newline at end of file diff --git a/src/main/java/dto/ScanObjPayload.java b/src/main/java/dto/ScanObjPayload.java index c4e49e4..a914cc4 100644 --- a/src/main/java/dto/ScanObjPayload.java +++ b/src/main/java/dto/ScanObjPayload.java @@ -6,11 +6,11 @@ import org.json.JSONObject; public class ScanObjPayload { public static ScanObj constructScanPayload(JSONObject jsonObj) throws JSONException { return new ScanObj( - jsonObj.getString("scanID"), - jsonObj.getString("offeringId"), - jsonObj.getString("eventID"), - jsonObj.getString("isManagedService"), - jsonObj.getString("componentList")); + jsonObj.getString("scan_id"), + jsonObj.getString("offering_id"), + jsonObj.getString("event_id"), + jsonObj.getString("is_managed_service"), + jsonObj.getString("component_list")); } private ScanObjPayload() {} diff --git a/src/main/java/rest/CreateGetResource.java b/src/main/java/rest/CreateGetResource.java index 8f937db..d5b800b 100644 --- a/src/main/java/rest/CreateGetResource.java +++ b/src/main/java/rest/CreateGetResource.java @@ -34,7 +34,7 @@ public class CreateGetResource { public Set list(@PathParam("scanId") String scanId) { //use to return specific scanIds just use usual fetch from sets, will be querying hte db directly here ConnectDB connectDB = new ConnectDB(); - String sql = "SELECT * FROM scans WHERE scanid=?"; + String sql = "SELECT * FROM scans WHERE scan_id=?"; try(Connection conn = connectDB.connect(); PreparedStatement pstmt = conn.prepareStatement(sql)) { pstmt.setString(1, scanId); @@ -42,11 +42,11 @@ public class CreateGetResource { while (rs.next()) { //very ugly solution needs some change to where we put the query Scans.add(new ScanObj( - rs.getString("scanID"), - rs.getString("offeringId"), - rs.getString("eventID"), - rs.getString("isManagedService"), - rs.getString("componentlist"))); + rs.getString("scan_id"), + rs.getString("offering_id"), + rs.getString("event_id"), + rs.getString("is_managed_service"), + rs.getString("component_list"))); } } catch (SQLException e) { System.out.println(e.getMessage()); diff --git a/src/main/java/rest/CreateScanRequest.java b/src/main/java/rest/CreateScanRequest.java index d59c476..65325a9 100644 --- a/src/main/java/rest/CreateScanRequest.java +++ b/src/main/java/rest/CreateScanRequest.java @@ -23,7 +23,6 @@ import java.sql.SQLException; @Path("/scanRequest") public class CreateScanRequest { - // all of these need cleaning up to be a more sensible solution @RestClient CreateScanService createScanService; diff --git a/src/main/java/rest/CreateScanResource.java b/src/main/java/rest/CreateScanResource.java index 916786e..7f933e2 100644 --- a/src/main/java/rest/CreateScanResource.java +++ b/src/main/java/rest/CreateScanResource.java @@ -5,13 +5,13 @@ import dto.ScanObjPayload; import dto.ScanObj; import org.eclipse.microprofile.rest.client.inject.RestClient; +import org.json.JSONException; import org.json.JSONObject; import javax.validation.Valid; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.Path; -import java.net.URISyntaxException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; @@ -25,17 +25,17 @@ public class CreateScanResource { @POST @Consumes({ "application/json" }) //in 
theory should take List to clean it up - public ScanObj invokeScanAnalyze(@Valid String scanInvocation) throws URISyntaxException { + public ScanObj invokeScanAnalyze(@Valid String scanInvocation) throws JSONException { JSONObject jsonData = new JSONObject(scanInvocation); ScanObj scanObj = ScanObjPayload.constructScanPayload(jsonData); ConnectDB connectDB = new ConnectDB(); try(Connection conn = connectDB.connect(); PreparedStatement pstmt = conn.prepareStatement(ScanObj.SQL)) { - pstmt.setString(1, scanObj.scanId); - pstmt.setString(2, scanObj.productId); - pstmt.setString(3, scanObj.eventId); - pstmt.setString(4, scanObj.isManagedService); - pstmt.setString(5, scanObj.componentList); + pstmt.setString(1, scanObj.getScanId()); + pstmt.setString(2, scanObj.getProductId()); + pstmt.setString(3, scanObj.getEventId()); + pstmt.setString(4, scanObj.getIsManagedService()); + pstmt.setString(5, scanObj.getComponentList()); pstmt.executeUpdate(); } catch (SQLException e) { System.out.println(e.getMessage()); diff --git a/src/main/java/rest/CreateStartScan.java b/src/main/java/rest/CreateStartScan.java index a689716..9fd1ee9 100644 --- a/src/main/java/rest/CreateStartScan.java +++ b/src/main/java/rest/CreateStartScan.java @@ -8,7 +8,6 @@ import org.eclipse.microprofile.rest.client.inject.RestClient; import javax.ws.rs.Path; import javax.ws.rs.PUT; import javax.ws.rs.PathParam; -import java.net.URISyntaxException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -22,14 +21,14 @@ public class CreateStartScan { @PUT @Path("/{scanId}") - public ScanObj invokeScanAnalyze(@PathParam("scanId") String scanId) throws URISyntaxException { + public ScanObj invokeScanAnalyze(@PathParam("scanId") String scanId) { ScanObj finalScan = null; ConnectDB connectDB = new ConnectDB(); try (Connection conn = connectDB.connect()) { finalScan = selectDataForArchiving(conn, scanId); if (finalScan != null) { - ArchiveSelectedScans(conn, finalScan); - PostArchivingCleanup(conn, scanId); + archiveSelectedScans(conn, finalScan); + postArchivingCleanup(conn, scanId); } else { System.out.println("No data match found for scan ID=" + scanId); } @@ -41,48 +40,48 @@ public class CreateStartScan { private ScanObj selectDataForArchiving(Connection conn, String scanId) { ScanObj finalScan = null; - String qry = "SELECT * FROM scans WHERE scanid=?"; - try (PreparedStatement pstmt = conn.prepareStatement(qry)) { + String sql = "SELECT * FROM scans WHERE scan_id=?"; + try (PreparedStatement pstmt = conn.prepareStatement(sql)) { pstmt.setString(1, scanId); ResultSet rs = pstmt.executeQuery(); //TODO: need to add unique keys to DBs //fix for individual results (not resultset) finalScan = new ScanObj( - rs.getString("scanID"), - rs.getString("offeringId"), - rs.getString("eventID"), - rs.getString("isManagedService"), - rs.getString("componentlist")); + rs.getString("scan_id"), + rs.getString("offering_id"), + rs.getString("event_id"), + rs.getString("is_managed_service"), + rs.getString("component_list")); } catch (SQLException e) { System.out.println(e.getMessage()); } return finalScan; } - private void ArchiveSelectedScans(Connection conn, ScanObj finalScan) { - String qry = "INSERT INTO archive " + - "(scanID, offeringId, eventID, isManagedService, componentlist) " + + private void archiveSelectedScans(Connection conn, ScanObj finalScan) { + String sql = "INSERT INTO archive " + + "(scan_id, offering_id, event_id, is_managed_service, component_list) " + "VALUES (? ? ? ? 
?)"; - try (PreparedStatement pstmt = conn.prepareStatement(qry)) { - pstmt.setString(1, finalScan.scanId); - pstmt.setString(2, finalScan.productId); - pstmt.setString(3, finalScan.eventId); - pstmt.setString(4, finalScan.isManagedService); - pstmt.setString(5, finalScan.componentList); + try (PreparedStatement pstmt = conn.prepareStatement(sql)) { + pstmt.setString(1, finalScan.getScanId()); + pstmt.setString(2, finalScan.getProductId()); + pstmt.setString(3, finalScan.getEventId()); + pstmt.setString(4, finalScan.getIsManagedService()); + pstmt.setString(5, finalScan.getComponentList()); pstmt.executeUpdate(); } catch (SQLException e) { System.out.println(e.getMessage()); } } - private void PostArchivingCleanup(Connection conn, String scanId) { + private void postArchivingCleanup(Connection conn, String scanId) { //TODO add proper checks //send task to the actual interface here using the resultset returned (should multiple scanids be allowed): //once the task is complete AND we have confirmation that the scan is done run the following sql - String qry = "DELETE FROM scans WHERE scanid=?"; - try (PreparedStatement pstmt = conn.prepareStatement(qry)) { + String sql = "DELETE FROM scans WHERE scan_id=?"; + try (PreparedStatement pstmt = conn.prepareStatement(sql)) { pstmt.setString(1, scanId); pstmt.executeUpdate(); } catch (SQLException e) { diff --git a/src/main/java/rest/RemoveScan.java b/src/main/java/rest/RemoveScan.java index 17e6cc6..6662f1e 100644 --- a/src/main/java/rest/RemoveScan.java +++ b/src/main/java/rest/RemoveScan.java @@ -20,10 +20,10 @@ public class RemoveScan { @DELETE @Path("/{scanId}") public boolean invokeScanAnalyze(@PathParam("scanId") String scanId) { - Boolean rc = false; + boolean rc = false; //send task to the actual interface here using the resultset returned (should multiple scanids be allowed): //once the task is complete AND we have confirmation that the scan is done run the following sql - String qry = "DELETE FROM scans WHERE scanid=?"; + String qry = "DELETE FROM scans WHERE scan_id=?"; ConnectDB connectDB = new ConnectDB(); try(Connection conn = connectDB.connect(); PreparedStatement pstmt = conn.prepareStatement(qry)) { diff --git a/src/test/java/dto/TestPayload.java b/src/test/java/dto/TestPayload.java index aab36ee..c5fa9c6 100644 --- a/src/test/java/dto/TestPayload.java +++ b/src/test/java/dto/TestPayload.java @@ -8,23 +8,23 @@ class TestPayload { @Test void TestBrew() { JSONObject jsonObject = new JSONObject(); - jsonObject.put("buildSystemType", "brew"); - jsonObject.put("brewId", "1"); - jsonObject.put("brewNVR", "1.1.0"); - jsonObject.put("pncId", "153"); - jsonObject.put("artifactType", "arti1"); - jsonObject.put("fileName", "myfile1"); - jsonObject.put("builtfromSource", true); + jsonObject.put("build_system_type", "brew"); + jsonObject.put("brew_id", "1"); + jsonObject.put("brew_nvr", "1.1.0"); + jsonObject.put("pnc_id", "153"); + jsonObject.put("artifact_type", "arti"); + jsonObject.put("file_name", "myfile"); + jsonObject.put("built_from_source", true); BrewObj brewObj1 = BrewObjPayload.constructScanPayload(jsonObject); BrewObj brewObj2 = new BrewObj( - jsonObject.getString("buildSystemType"), - jsonObject.getString("brewId"), - jsonObject.getString("brewNVR"), - jsonObject.getString("pncId"), - jsonObject.getString("artifactType"), - jsonObject.getString("fileName"), - jsonObject.getBoolean("builtfromSource")); + jsonObject.getString("build_system_type"), + jsonObject.getString("brew_id"), + jsonObject.getString("brew_nvr"), + 
jsonObject.getString("pnc_id"), + jsonObject.getString("artifact_type"), + jsonObject.getString("file_name"), + jsonObject.getBoolean("built_from_source")); System.out.println("BrewObj1: " + brewObj1.toString()); System.out.println("BrewObj2: " + brewObj2.toString()); assert(brewObj1.getBuildSystemType().equals(brewObj2.getBuildSystemType())); @@ -39,17 +39,17 @@ class TestPayload { @Test void TestGit() { JSONObject jsonObject = new JSONObject(); - jsonObject.put("buildSystemType", "git"); + jsonObject.put("build_system_type", "git"); jsonObject.put("repository", "repo"); jsonObject.put("reference", "ref"); - jsonObject.put("commitId", "comid"); + jsonObject.put("commit_id", "c6385a754421a57cd0a26ccba187cd687c8d1258"); GitObj gitObj1 = GitObjPayload.constructScanPayload(jsonObject); GitObj gitObj2 = new GitObj( - jsonObject.getString("buildSystemType"), + jsonObject.getString("build_system_type"), jsonObject.getString("repository"), jsonObject.getString("reference"), - jsonObject.getString("commitId")); + jsonObject.getString("commit_id")); System.out.println("GitObj1: " + gitObj1.toString()); System.out.println("GitObj2: " + gitObj2.toString()); assert(gitObj1.getBuildSystemType().equals(gitObj2.getBuildSystemType())); @@ -61,15 +61,42 @@ class TestPayload { @Test void TestPnc() { JSONObject jsonObject = new JSONObject(); - jsonObject.put("buildSystemType", "pnc"); - jsonObject.put("buildId", "153"); + jsonObject.put("build_system_type", "pnc"); + jsonObject.put("build_id", "153"); PncObj pncObj1 = PncObjPayload.constructScanPayload(jsonObject); - PncObj pncObj2 = new PncObj(jsonObject.getString("buildSystemType"), jsonObject.getString("buildId")); + PncObj pncObj2 = new PncObj( + jsonObject.getString("build_system_type"), + jsonObject.getString("build_id")); System.out.println("PncObj1: " + pncObj1.toString()); System.out.println("PncObj2: " + pncObj2.toString()); assert(pncObj1.getBuildSystemType().equals(pncObj2.getBuildSystemType())); assert(pncObj1.getBuildId().equals(pncObj2.getBuildId())); } + @Test + void TestScan() { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("scan_id", "ABC"); + jsonObject.put("offering_id", "product#"); + jsonObject.put("event_id", "event#"); + jsonObject.put("is_managed_service", "TRUE"); + jsonObject.put("component_list", "components"); + + ScanObj scanObj1 = ScanObjPayload.constructScanPayload(jsonObject); + ScanObj scanObj2 = new ScanObj( + jsonObject.getString("scan_id"), + jsonObject.getString("offering_id"), + jsonObject.getString("event_id"), + jsonObject.getString("is_managed_service"), + jsonObject.getString("component_list")); + System.out.println("ScanObj1: " + scanObj1.toString()); + System.out.println("ScanObj2: " + scanObj2.toString()); + assert(scanObj1.getScanId().equals(scanObj2.getScanId())); + assert(scanObj1.getProductId().equals(scanObj2.getProductId())); + assert(scanObj1.getEventId().equals(scanObj2.getEventId())); + assert(scanObj1.getIsManagedService().equals(scanObj2.getIsManagedService())); + assert(scanObj1.getComponentList().equals(scanObj2.getComponentList())); + } + } From a178a7fc18439e2ac45d97caa409575cee1f175c Mon Sep 17 00:00:00 2001 From: Leonid Bossis Date: Fri, 9 Jun 2023 16:33:36 -0400 Subject: [PATCH 16/26] add logging facility --- src/main/java/rest/CreateGetResource.java | 10 +++++---- src/main/java/rest/CreateScanRequest.java | 18 +++++++++------ src/main/java/rest/CreateScanResource.java | 8 +++++-- src/main/java/rest/CreateStartScan.java | 26 +++++++++++++--------- 
src/main/java/rest/RemoveScan.java | 7 +++++- src/test/java/dto/TestPayload.java | 22 ++++++++++-------- 6 files changed, 57 insertions(+), 34 deletions(-) diff --git a/src/main/java/rest/CreateGetResource.java b/src/main/java/rest/CreateGetResource.java index d5b800b..17f33de 100644 --- a/src/main/java/rest/CreateGetResource.java +++ b/src/main/java/rest/CreateGetResource.java @@ -6,6 +6,8 @@ import java.util.Set; import dto.ScanObj; import dto.ConnectDB; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.ws.rs.GET; import javax.ws.rs.Path; @@ -18,15 +20,15 @@ import java.sql.SQLException; // @Path("/api/v1/[osh-scan]") @Path("/scanGet") public class CreateGetResource { - // @Inject - // EntityManager em; + + private static final Logger logger = LoggerFactory.getLogger(CreateGetResource.class); CreateScanService createScanService; private Set Scans = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<>())); public CreateGetResource() { - + // LDB: @TODO either put some code here or remove this not used public constructor } @GET @@ -49,7 +51,7 @@ public class CreateGetResource { rs.getString("component_list"))); } } catch (SQLException e) { - System.out.println(e.getMessage()); + logger.error(e.getMessage()); } return Scans; } diff --git a/src/main/java/rest/CreateScanRequest.java b/src/main/java/rest/CreateScanRequest.java index 65325a9..7e47d8c 100644 --- a/src/main/java/rest/CreateScanRequest.java +++ b/src/main/java/rest/CreateScanRequest.java @@ -9,13 +9,15 @@ import dto.PncObj; import dto.PncObjPayload; import org.eclipse.microprofile.rest.client.inject.RestClient; +import org.json.JSONException; import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.validation.Valid; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.Path; -import java.net.URISyntaxException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; @@ -23,6 +25,8 @@ import java.sql.SQLException; @Path("/scanRequest") public class CreateScanRequest { + private static final Logger logger = LoggerFactory.getLogger(CreateScanRequest.class); + @RestClient CreateScanService createScanService; @@ -30,7 +34,7 @@ public class CreateScanRequest { @Path("/brew") @Consumes({ "application/json" }) // in theory should take List to clean it up - public BrewObj invokeBrewScanAnalyze(@Valid String scanInvocation) throws URISyntaxException { + public BrewObj invokeBrewScanAnalyze(@Valid String scanInvocation) throws JSONException { JSONObject jsonData = new JSONObject(scanInvocation); BrewObj brewObj = BrewObjPayload.constructScanPayload(jsonData); ConnectDB connectDB = new ConnectDB(); @@ -45,7 +49,7 @@ public class CreateScanRequest { pstmt.setBoolean(7, brewObj.getBuiltFromSource()); pstmt.executeUpdate(); } catch (SQLException e) { - System.out.println(e.getMessage()); + logger.error(e.getMessage()); } return brewObj; } @@ -53,7 +57,7 @@ public class CreateScanRequest { @POST @Path("/git") @Consumes({ "application/json" }) - public GitObj invokeGitScanAnalyze(@Valid String scanInvocation)throws URISyntaxException { + public GitObj invokeGitScanAnalyze(@Valid String scanInvocation)throws JSONException { JSONObject jsonData = new JSONObject(scanInvocation); GitObj gitObj = GitObjPayload.constructScanPayload(jsonData); ConnectDB connectDB = new ConnectDB(); @@ -65,7 +69,7 @@ public class CreateScanRequest { pstmt.setString(4, gitObj.getCommitId()); pstmt.executeUpdate(); } catch 
(SQLException e) { - System.out.println(e.getMessage()); + logger.error(e.getMessage()); } return gitObj; } @@ -73,7 +77,7 @@ public class CreateScanRequest { @POST @Path("/pnc") @Consumes({ "application/json" }) - public PncObj invokePncScanAnalyze(@Valid String scanInvocation)throws URISyntaxException { + public PncObj invokePncScanAnalyze(@Valid String scanInvocation)throws JSONException { JSONObject jsonData = new JSONObject(scanInvocation); PncObj pncObj = PncObjPayload.constructScanPayload(jsonData); ConnectDB connectDB = new ConnectDB(); @@ -83,7 +87,7 @@ public class CreateScanRequest { pstmt.setString(2, pncObj.getBuildId()); pstmt.executeUpdate(); } catch (SQLException e) { - System.out.println(e.getMessage()); + logger.error(e.getMessage()); } return pncObj; } diff --git a/src/main/java/rest/CreateScanResource.java b/src/main/java/rest/CreateScanResource.java index 7f933e2..1ca03ce 100644 --- a/src/main/java/rest/CreateScanResource.java +++ b/src/main/java/rest/CreateScanResource.java @@ -7,6 +7,8 @@ import dto.ScanObj; import org.eclipse.microprofile.rest.client.inject.RestClient; import org.json.JSONException; import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.validation.Valid; import javax.ws.rs.Consumes; @@ -19,6 +21,8 @@ import java.sql.SQLException; @Path("/") public class CreateScanResource { + private static final Logger logger = LoggerFactory.getLogger(CreateScanResource.class); + @RestClient CreateScanService createScanService; @@ -38,8 +42,8 @@ public class CreateScanResource { pstmt.setString(5, scanObj.getComponentList()); pstmt.executeUpdate(); } catch (SQLException e) { - System.out.println(e.getMessage()); - } + logger.error(e.getMessage()); + } return scanObj; } } diff --git a/src/main/java/rest/CreateStartScan.java b/src/main/java/rest/CreateStartScan.java index 9fd1ee9..0aade1d 100644 --- a/src/main/java/rest/CreateStartScan.java +++ b/src/main/java/rest/CreateStartScan.java @@ -4,6 +4,8 @@ import dto.ConnectDB; import dto.ScanObj; import org.eclipse.microprofile.rest.client.inject.RestClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.ws.rs.Path; import javax.ws.rs.PUT; @@ -16,6 +18,8 @@ import java.sql.SQLException; @Path("/startScan") public class CreateStartScan { + private static final Logger logger = LoggerFactory.getLogger(CreateStartScan.class); + @RestClient CreateScanService createScanService; @@ -30,10 +34,10 @@ public class CreateStartScan { archiveSelectedScans(conn, finalScan); postArchivingCleanup(conn, scanId); } else { - System.out.println("No data match found for scan ID=" + scanId); + logger.warn("No data match found for scan ID=" + scanId); } } catch (SQLException e) { - System.out.println(e.getMessage()); + logger.error(e.getMessage()); } return finalScan; } @@ -45,8 +49,8 @@ public class CreateStartScan { pstmt.setString(1, scanId); ResultSet rs = pstmt.executeQuery(); - //TODO: need to add unique keys to DBs - //fix for individual results (not resultset) + // TODO: need to add unique keys to DBs + // fix for individual results (not resultset) finalScan = new ScanObj( rs.getString("scan_id"), rs.getString("offering_id"), @@ -54,7 +58,7 @@ public class CreateStartScan { rs.getString("is_managed_service"), rs.getString("component_list")); } catch (SQLException e) { - System.out.println(e.getMessage()); + logger.error(e.getMessage()); } return finalScan; } @@ -71,21 +75,21 @@ public class CreateStartScan { pstmt.setString(5, finalScan.getComponentList()); 
pstmt.executeUpdate(); } catch (SQLException e) { - System.out.println(e.getMessage()); + logger.error(e.getMessage()); } } private void postArchivingCleanup(Connection conn, String scanId) { - //TODO add proper checks - //send task to the actual interface here using the resultset returned (should multiple scanids be allowed): - //once the task is complete AND we have confirmation that the scan is done run the following sql + // TODO add proper checks + // send task to the actual interface here using the resultset returned (should multiple scanids be allowed): + // once the task is complete AND we have confirmation that the scan is done run the following sql String sql = "DELETE FROM scans WHERE scan_id=?"; try (PreparedStatement pstmt = conn.prepareStatement(sql)) { pstmt.setString(1, scanId); pstmt.executeUpdate(); } catch (SQLException e) { - System.out.println(e.getMessage()); + logger.error(e.getMessage()); } } -} \ No newline at end of file +} diff --git a/src/main/java/rest/RemoveScan.java b/src/main/java/rest/RemoveScan.java index 6662f1e..783873c 100644 --- a/src/main/java/rest/RemoveScan.java +++ b/src/main/java/rest/RemoveScan.java @@ -3,6 +3,8 @@ package rest; import dto.ConnectDB; import org.eclipse.microprofile.rest.client.inject.RestClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.ws.rs.Path; import javax.ws.rs.DELETE; @@ -13,6 +15,9 @@ import java.sql.SQLException; @Path("/deleteScan") public class RemoveScan { + + private static final Logger logger = LoggerFactory.getLogger(RemoveScan.class); + // @Inject @RestClient CreateScanService createScanService; @@ -31,7 +36,7 @@ public class RemoveScan { pstmt.executeUpdate(); rc = true; } catch (SQLException e) { - System.out.println(e.getMessage()); + logger.error(e.getMessage()); } return rc; } diff --git a/src/test/java/dto/TestPayload.java b/src/test/java/dto/TestPayload.java index c5fa9c6..0046af0 100644 --- a/src/test/java/dto/TestPayload.java +++ b/src/test/java/dto/TestPayload.java @@ -2,9 +2,13 @@ package dto; import org.json.JSONObject; import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class TestPayload { + private static final Logger logger = LoggerFactory.getLogger(TestPayload.class); + @Test void TestBrew() { JSONObject jsonObject = new JSONObject(); @@ -25,8 +29,9 @@ class TestPayload { jsonObject.getString("artifact_type"), jsonObject.getString("file_name"), jsonObject.getBoolean("built_from_source")); - System.out.println("BrewObj1: " + brewObj1.toString()); - System.out.println("BrewObj2: " + brewObj2.toString()); + + logger.info("BrewObj1: " + brewObj1.toString()); + logger.info("BrewObj2: " + brewObj2.toString()); assert(brewObj1.getBuildSystemType().equals(brewObj2.getBuildSystemType())); assert(brewObj1.getBrewId().equals(brewObj2.getBrewId())); assert(brewObj1.getBrewNvr().equals(brewObj2.getBrewNvr())); @@ -50,8 +55,8 @@ class TestPayload { jsonObject.getString("repository"), jsonObject.getString("reference"), jsonObject.getString("commit_id")); - System.out.println("GitObj1: " + gitObj1.toString()); - System.out.println("GitObj2: " + gitObj2.toString()); + logger.info("GitObj1: " + gitObj1.toString()); + logger.info("GitObj2: " + gitObj2.toString()); assert(gitObj1.getBuildSystemType().equals(gitObj2.getBuildSystemType())); assert(gitObj1.getRepository().equals(gitObj2.getRepository())); assert(gitObj1.getReference().equals(gitObj2.getReference())); @@ -68,8 +73,8 @@ class TestPayload { PncObj pncObj2 = new PncObj( 
jsonObject.getString("build_system_type"), jsonObject.getString("build_id")); - System.out.println("PncObj1: " + pncObj1.toString()); - System.out.println("PncObj2: " + pncObj2.toString()); + logger.info("PncObj1: " + pncObj1.toString()); + logger.info("PncObj2: " + pncObj2.toString()); assert(pncObj1.getBuildSystemType().equals(pncObj2.getBuildSystemType())); assert(pncObj1.getBuildId().equals(pncObj2.getBuildId())); } @@ -90,13 +95,12 @@ class TestPayload { jsonObject.getString("event_id"), jsonObject.getString("is_managed_service"), jsonObject.getString("component_list")); - System.out.println("ScanObj1: " + scanObj1.toString()); - System.out.println("ScanObj2: " + scanObj2.toString()); + logger.info("ScanObj1: " + scanObj1.toString()); + logger.info("ScanObj2: " + scanObj2.toString()); assert(scanObj1.getScanId().equals(scanObj2.getScanId())); assert(scanObj1.getProductId().equals(scanObj2.getProductId())); assert(scanObj1.getEventId().equals(scanObj2.getEventId())); assert(scanObj1.getIsManagedService().equals(scanObj2.getIsManagedService())); assert(scanObj1.getComponentList().equals(scanObj2.getComponentList())); } - } From 63fef64f31e5aa5252c412a1b1e92d529cef57fb Mon Sep 17 00:00:00 2001 From: Leonid Bossis Date: Sun, 11 Jun 2023 15:35:42 -0400 Subject: [PATCH 17/26] checkpoint #2 --- src/main/java/dto/BrewObj.java | 20 ++++++++--------- src/main/java/dto/GitObj.java | 15 ++++++------- src/main/java/dto/PncObj.java | 10 ++++----- src/main/java/dto/ScanObj.java | 17 +++++++-------- src/test/java/dto/TestPayload.java | 35 +++++++++++++++--------------- 5 files changed, 48 insertions(+), 49 deletions(-) diff --git a/src/main/java/dto/BrewObj.java b/src/main/java/dto/BrewObj.java index 878993f..f711502 100644 --- a/src/main/java/dto/BrewObj.java +++ b/src/main/java/dto/BrewObj.java @@ -7,22 +7,22 @@ import lombok.ToString; import lombok.extern.jackson.Jacksonized; import java.io.Serializable; -@ToString -@Getter @AllArgsConstructor -@Jacksonized @Builder +@Getter +@ToString +@Jacksonized public class BrewObj implements Serializable { public static final String SQL = "INSERT INTO brewscans " + "(build_system_type, brew_id, brew_nvr, pnc_id, artifact_type, file_name, built_from_source)" + "VALUES (? ? ? ? ? ? ?)"; - private String buildSystemType; - private String brewId; - private String brewNvr; - private String pncId; - private String artifactType; - private String fileName; - private Boolean builtFromSource; + private final String buildSystemType; + private final String brewId; + private final String brewNvr; + private final String pncId; + private final String artifactType; + private final String fileName; + private final Boolean builtFromSource; } diff --git a/src/main/java/dto/GitObj.java b/src/main/java/dto/GitObj.java index 46c7ce1..81f8d3f 100644 --- a/src/main/java/dto/GitObj.java +++ b/src/main/java/dto/GitObj.java @@ -5,22 +5,21 @@ import lombok.Builder; import lombok.Getter; import lombok.ToString; import lombok.extern.jackson.Jacksonized; - import java.io.Serializable; -@ToString -@Getter @AllArgsConstructor -@Jacksonized @Builder +@Getter +@ToString +@Jacksonized public class GitObj implements Serializable { public static final String SQL = "INSERT INTO gitscans " + "(build_system_type, repository, reference, commit_id)" + "VALUES (? ? ? 
?)"; - private String buildSystemType; - private String repository; - private String reference; - private String commitId; + private final String buildSystemType; + private final String repository; + private final String reference; + private final String commitId; } \ No newline at end of file diff --git a/src/main/java/dto/PncObj.java b/src/main/java/dto/PncObj.java index a3a06ea..14b9f7d 100644 --- a/src/main/java/dto/PncObj.java +++ b/src/main/java/dto/PncObj.java @@ -7,15 +7,15 @@ import lombok.ToString; import lombok.extern.jackson.Jacksonized; import java.io.Serializable; -@ToString -@Getter @AllArgsConstructor -@Jacksonized @Builder +@Getter +@ToString +@Jacksonized public class PncObj implements Serializable { public static final String SQL = "INSERT INTO pncscans (build_system_type, build_id) VALUES (? ?)"; - private String buildSystemType; - private String buildId; + private final String buildSystemType; + private final String buildId; } \ No newline at end of file diff --git a/src/main/java/dto/ScanObj.java b/src/main/java/dto/ScanObj.java index c7b33b8..8cf61f2 100644 --- a/src/main/java/dto/ScanObj.java +++ b/src/main/java/dto/ScanObj.java @@ -5,26 +5,25 @@ import lombok.Builder; import lombok.Getter; import lombok.ToString; import lombok.extern.jackson.Jacksonized; - import java.io.Serializable; //still need to fix all the scan objects to be significantly less poorly written //TODO add interface for the scan objects (is probably the cleanest solution) -@ToString -@Getter @AllArgsConstructor -@Jacksonized @Builder +@Getter +@ToString +@Jacksonized public class ScanObj implements Serializable { public static final String SQL = "INSERT INTO scans " + "(scan_id, offering_id, event_id, is_managed_service, component_list) " + "VALUES (? ? ? ? 
?)"; - private String scanId; - private String productId; - private String eventId; - private String isManagedService; - private String componentList; + private final String scanId; + private final String productId; + private final String eventId; + private final String isManagedService; + private final String componentList; } \ No newline at end of file diff --git a/src/test/java/dto/TestPayload.java b/src/test/java/dto/TestPayload.java index 0046af0..51980ac 100644 --- a/src/test/java/dto/TestPayload.java +++ b/src/test/java/dto/TestPayload.java @@ -4,6 +4,7 @@ import org.json.JSONObject; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.junit.jupiter.api.Assertions.*; class TestPayload { @@ -32,12 +33,12 @@ class TestPayload { logger.info("BrewObj1: " + brewObj1.toString()); logger.info("BrewObj2: " + brewObj2.toString()); - assert(brewObj1.getBuildSystemType().equals(brewObj2.getBuildSystemType())); - assert(brewObj1.getBrewId().equals(brewObj2.getBrewId())); - assert(brewObj1.getBrewNvr().equals(brewObj2.getBrewNvr())); - assert(brewObj1.getPncId().equals(brewObj2.getPncId())); - assert(brewObj1.getArtifactType().equals(brewObj2.getArtifactType())); - assert(brewObj1.getFileName().equals(brewObj2.getFileName())); + assertEquals(brewObj1.getBuildSystemType(), brewObj2.getBuildSystemType()); + assertEquals(brewObj1.getBrewId(), brewObj2.getBrewId()); + assertEquals(brewObj1.getBrewNvr(), brewObj2.getBrewNvr()); + assertEquals(brewObj1.getPncId(), brewObj2.getPncId()); + assertEquals(brewObj1.getArtifactType(), brewObj2.getArtifactType()); + assertEquals(brewObj1.getFileName(), brewObj2.getFileName()); assert(brewObj1.getBuiltFromSource() == brewObj2.getBuiltFromSource()); } @@ -57,10 +58,10 @@ class TestPayload { jsonObject.getString("commit_id")); logger.info("GitObj1: " + gitObj1.toString()); logger.info("GitObj2: " + gitObj2.toString()); - assert(gitObj1.getBuildSystemType().equals(gitObj2.getBuildSystemType())); - assert(gitObj1.getRepository().equals(gitObj2.getRepository())); - assert(gitObj1.getReference().equals(gitObj2.getReference())); - assert(gitObj1.getCommitId().equals(gitObj2.getCommitId())); + assertEquals(gitObj1.getBuildSystemType(), gitObj2.getBuildSystemType()); + assertEquals(gitObj1.getRepository(), gitObj2.getRepository()); + assertEquals(gitObj1.getReference(), gitObj2.getReference()); + assertEquals(gitObj1.getCommitId(), gitObj2.getCommitId()); } @Test @@ -75,8 +76,8 @@ class TestPayload { jsonObject.getString("build_id")); logger.info("PncObj1: " + pncObj1.toString()); logger.info("PncObj2: " + pncObj2.toString()); - assert(pncObj1.getBuildSystemType().equals(pncObj2.getBuildSystemType())); - assert(pncObj1.getBuildId().equals(pncObj2.getBuildId())); + assertEquals(pncObj1.getBuildSystemType(), pncObj2.getBuildSystemType()); + assertEquals(pncObj1.getBuildId(), pncObj2.getBuildId()); } @Test @@ -97,10 +98,10 @@ class TestPayload { jsonObject.getString("component_list")); logger.info("ScanObj1: " + scanObj1.toString()); logger.info("ScanObj2: " + scanObj2.toString()); - assert(scanObj1.getScanId().equals(scanObj2.getScanId())); - assert(scanObj1.getProductId().equals(scanObj2.getProductId())); - assert(scanObj1.getEventId().equals(scanObj2.getEventId())); - assert(scanObj1.getIsManagedService().equals(scanObj2.getIsManagedService())); - assert(scanObj1.getComponentList().equals(scanObj2.getComponentList())); + assertEquals(scanObj1.getScanId(), scanObj2.getScanId()); + 
assertEquals(scanObj1.getProductId(), scanObj2.getProductId()); + assertEquals(scanObj1.getEventId(), scanObj2.getEventId()); + assertEquals(scanObj1.getIsManagedService(), scanObj2.getIsManagedService()); + assertEquals(scanObj1.getComponentList(), scanObj2.getComponentList()); } } From 10812884187d4b5e0e5a96799b86366d002759cc Mon Sep 17 00:00:00 2001 From: Jonathan Christison Date: Mon, 12 Jun 2023 17:37:27 +0100 Subject: [PATCH 18/26] Hacky attempt at adding DB Dev services for local development --- pom.xml | 145 +++++++++------------- src/main/resources/application.properties | 5 +- 2 files changed, 60 insertions(+), 90 deletions(-) diff --git a/pom.xml b/pom.xml index a884d11..77233b1 100644 --- a/pom.xml +++ b/pom.xml @@ -1,14 +1,6 @@ - - - jboss - JBoss repository - http://repository.jboss.org/maven2 - - - 4.0.0 com.redhat.ncaughey rest-json-quickstart @@ -33,59 +25,35 @@ pom import - - - - - - - - io.quarkus - quarkus-openshift - - - org.json - json - 20220320 - - - - org.postgresql - postgresql - 42.6.0 - - - - - - - - org.hibernate - hibernate-core + + io.quarkus + quarkus-openshift + + + org.json + json + 20220320 + + + org.postgresql + postgresql + 42.6.0 - - org.glassfish.jaxb - jaxb-runtime + + org.hibernate + hibernate-core + + + org.glassfish.jaxb + jaxb-runtime + + + io.quarkus + quarkus-jdbc-postgresql - - - - - io.quarkus - quarkus-jdbc-postgresql - - io.quarkus quarkus-resteasy-reactive-jackson @@ -94,45 +62,44 @@ io.quarkus quarkus-arc + + io.quarkus + quarkus-agroal + io.quarkus quarkus-junit5 test - - org.projectlombok - lombok - 1.18.26 - provided - - - - - javax.validation - validation-api - 1.0.0.GA - - - - jakarta.persistence - jakarta.persistence-api - 3.1.0 - - - - - org.eclipse.microprofile.rest.client - microprofile-rest-client-api - 3.0.1 - - - - + + org.projectlombok + lombok + 1.18.26 + provided + + + javax.validation + validation-api + 1.0.0.GA + + + jakarta.persistence + jakarta.persistence-api + 3.1.0 + + + org.eclipse.microprofile.rest.client + microprofile-rest-client-api + 3.0.1 + + + + jboss + JBoss repository + http://repository.jboss.org/maven2 + + diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index eca88b0..9de352c 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -4,4 +4,7 @@ # couchdb.name=scan-results # couchdb.url=https://localhost:5984 -# quarkus.hibernate-orm.database.generation=drop-and-create \ No newline at end of file +# quarkus.hibernate-orm.database.generation=drop-and-create +quarkus.datasource.devservices.enabled=true +quarkus.datasource.db-kind=postgresql +quarkus.datasource.devservices.volumes."/local/test/data"=/var/lib/postgresql/data From 4526231088cedce1e5130b3d0ac86831adea694b Mon Sep 17 00:00:00 2001 From: Jonathan Christison Date: Tue, 13 Jun 2023 12:08:28 +0100 Subject: [PATCH 19/26] Secure volume mount example --- src/main/resources/application.properties | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index d698fc5..bf159e3 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -6,7 +6,20 @@ # quarkus.hibernate-orm.database.generation=drop-and-create -quarkus.kerberos.keytab-path= HTTP_osh-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com@IPA.REDHAT.COM.keytab -quarkus.kerberos.service-principal-name= 
HTTP/osh-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com@IPA.REDHAT.COM +%dev.quarkus.kerberos.keytab-path= HTTP_osh-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com@IPA.REDHAT.COM.keytab +%dev.quarkus.kerberos.service-principal-name= HTTP/osh-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com@IPA.REDHAT.COM + +%stage.quarkus.openshift.name=osh-stage +%stage.quarkus.openshift.labels.env=stage +%stage.quarkus.openshift.route.expose=true + +########################################## +# Kerberos Specifics # +########################################## +%stage.quarkus.openshift.secret-volumes.osh-wrapper.secret-name=kerberos-keytab-osh +%stage.quarkus.openshift.mounts.osh-wrapper.path=/kerberos +%stage.quarkus.openshift.mounts.osh-wrapper.read-only=true +%stage.quarkus.kerberos.keytab-path= /kerberos/kerberos-keytab-osh +%stage.quarkus.kerberos.service-principal-name= HTTP/osh-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com@IPA.REDHAT.COM From 2e38ec0461622dc03f1c5caf3880279e1c82a8c7 Mon Sep 17 00:00:00 2001 From: Jonathan Christison Date: Tue, 13 Jun 2023 17:21:25 +0100 Subject: [PATCH 20/26] Add krb5.conf to container as config map --- k8s/kerberos-config.yaml | 43 +++++++++++++++++++++++ src/main/resources/application.properties | 8 +++++ 2 files changed, 51 insertions(+) create mode 100644 k8s/kerberos-config.yaml diff --git a/k8s/kerberos-config.yaml b/k8s/kerberos-config.yaml new file mode 100644 index 0000000..e449e38 --- /dev/null +++ b/k8s/kerberos-config.yaml @@ -0,0 +1,43 @@ +#wget https://gitlab.corp.redhat.com/it-iam/system-configs/raw/master/krb5/idm/linux-krb5.conf && oc create configmap kerberos-config --from-file=linux-krb5.conf --dry-run=client -o yaml > kerberos-config.yaml +apiVersion: v1 +data: + linux-krb5.conf: | + includedir /etc/krb5.conf.d/ + + # depending on your config, you may wish to uncomment the following: + # includedir /var/lib/sss/pubconf/krb5.include.d/ + + [libdefaults] + default_realm = IPA.REDHAT.COM + dns_lookup_realm = true + dns_lookup_kdc = true + rdns = false + dns_canonicalize_hostname = false + ticket_lifetime = 24h + forwardable = true + udp_preference_limit = 0 + default_ccache_name = KEYRING:persistent:%{uid} + + [realms] + + REDHAT.COM = { + default_domain = redhat.com + dns_lookup_kdc = true + master_kdc = kerberos.corp.redhat.com + admin_server = kerberos.corp.redhat.com + } + + IPA.REDHAT.COM = { + default_domain = ipa.redhat.com + dns_lookup_kdc = true + # Trust tickets issued by legacy realm on this host + auth_to_local = RULE:[1:$1@$0](.*@REDHAT\.COM)s/@.*// + auth_to_local = DEFAULT + } + + #DO NOT ADD A [domain_realms] section + #https://mojo.redhat.com/docs/DOC-1166841 +kind: ConfigMap +metadata: + creationTimestamp: null + name: kerberos-config diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index bf159e3..4c3d438 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -22,4 +22,12 @@ %stage.quarkus.kerberos.keytab-path= /kerberos/kerberos-keytab-osh %stage.quarkus.kerberos.service-principal-name= HTTP/osh-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com@IPA.REDHAT.COM +%stage.quarkus.openshift.mounts.osh-wrapper-config-vol.path=/etc/krb5.conf +%stage.quarkus.openshift.mounts.osh-wrapper-config-vol.sub-path=linux-krb5.conf +%stage.quarkus.openshift.config-map-volumes.osh-wrapper-config-vol.config-map-name=kerberos-config 
+%stage.quarkus.openshift.config-map-volumes.osh-wrapper-config-vol.items."linux-krb5.conf".path=linux-krb5.conf +%stage.quarkus.openshift.mounts.osh-wrapper-config-vol.read-only=true + + +%stage.quarkus.log.level=DEBUG From e3fcecac060a00c251095c26cd28baea3d4d6690 Mon Sep 17 00:00:00 2001 From: Jonathan Christison Date: Tue, 13 Jun 2023 18:17:34 +0100 Subject: [PATCH 21/26] Change to osh rather than osh-stage Kerberos is tied to `osh-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com` not `osh-stage-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com` --- src/main/resources/application.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 4c3d438..92cde6d 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -9,7 +9,7 @@ %dev.quarkus.kerberos.keytab-path= HTTP_osh-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com@IPA.REDHAT.COM.keytab %dev.quarkus.kerberos.service-principal-name= HTTP/osh-pct-security-tooling.apps.ocp-c1.prod.psi.redhat.com@IPA.REDHAT.COM -%stage.quarkus.openshift.name=osh-stage +%stage.quarkus.openshift.name=osh %stage.quarkus.openshift.labels.env=stage %stage.quarkus.openshift.route.expose=true From b1942b512ae5444a9fd448fcfc65e7be0e33a9d8 Mon Sep 17 00:00:00 2001 From: Jonathan Christison Date: Wed, 14 Jun 2023 10:35:37 +0100 Subject: [PATCH 22/26] Change kerberos settings --- k8s/kerberos-config.yaml | 7 ++++--- k8s/linux-krb5.conf | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 3 deletions(-) create mode 100644 k8s/linux-krb5.conf diff --git a/k8s/kerberos-config.yaml b/k8s/kerberos-config.yaml index e449e38..786f28c 100644 --- a/k8s/kerberos-config.yaml +++ b/k8s/kerberos-config.yaml @@ -1,4 +1,4 @@ -#wget https://gitlab.corp.redhat.com/it-iam/system-configs/raw/master/krb5/idm/linux-krb5.conf && oc create configmap kerberos-config --from-file=linux-krb5.conf --dry-run=client -o yaml > kerberos-config.yaml +#oc create configmap kerberos-config --from-file=linux-krb5.conf --dry-run=client -o yaml > kerberos-config.yaml apiVersion: v1 data: linux-krb5.conf: | @@ -15,8 +15,10 @@ data: dns_canonicalize_hostname = false ticket_lifetime = 24h forwardable = true - udp_preference_limit = 0 + udp_preference_limit = 1 default_ccache_name = KEYRING:persistent:%{uid} + max_retries = 1 + kdc_timeout = 1500 [realms] @@ -34,7 +36,6 @@ data: auth_to_local = RULE:[1:$1@$0](.*@REDHAT\.COM)s/@.*// auth_to_local = DEFAULT } - #DO NOT ADD A [domain_realms] section #https://mojo.redhat.com/docs/DOC-1166841 kind: ConfigMap diff --git a/k8s/linux-krb5.conf b/k8s/linux-krb5.conf new file mode 100644 index 0000000..701d438 --- /dev/null +++ b/k8s/linux-krb5.conf @@ -0,0 +1,36 @@ +includedir /etc/krb5.conf.d/ + +# depending on your config, you may wish to uncomment the following: +# includedir /var/lib/sss/pubconf/krb5.include.d/ + +[libdefaults] + default_realm = IPA.REDHAT.COM + dns_lookup_realm = true + dns_lookup_kdc = true + rdns = false + dns_canonicalize_hostname = false + ticket_lifetime = 24h + forwardable = true + udp_preference_limit = 1 + default_ccache_name = KEYRING:persistent:%{uid} + max_retries = 1 + kdc_timeout = 1500 + +[realms] + + REDHAT.COM = { + default_domain = redhat.com + dns_lookup_kdc = true + master_kdc = kerberos.corp.redhat.com + admin_server = kerberos.corp.redhat.com + } + + IPA.REDHAT.COM = { + default_domain = ipa.redhat.com + dns_lookup_kdc = true + # Trust 
tickets issued by legacy realm on this host + auth_to_local = RULE:[1:$1@$0](.*@REDHAT\.COM)s/@.*// + auth_to_local = DEFAULT + } +#DO NOT ADD A [domain_realms] section +#https://mojo.redhat.com/docs/DOC-1166841 From c15a0c5ee1fbf981a068ae137173986c9374f838 Mon Sep 17 00:00:00 2001 From: Jonathan Christison Date: Wed, 14 Jun 2023 11:12:18 +0100 Subject: [PATCH 23/26] Add example deploy and set TLS to edge --- src/main/resources/application.properties | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 92cde6d..cfe76d7 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -1,3 +1,4 @@ +#Example deploy - mvn deploy -Dquarkus.profile=stage -Dquarkus.kubernetes.deploy=true # quarkus.rest-client."rest.CreateScanService".url=https://localhost:8080/ # quarkus.rest-client."rest.CreateScanService".scope=javax.inject.Singleton @@ -11,6 +12,7 @@ %stage.quarkus.openshift.name=osh %stage.quarkus.openshift.labels.env=stage +%stage.quarkus.openshift.route.tls.termination=edge %stage.quarkus.openshift.route.expose=true ########################################## From e755fe945cdeed93d2bac33a5f541247a1bc6749 Mon Sep 17 00:00:00 2001 From: Jonathan Christison Date: Wed, 14 Jun 2023 14:32:31 +0100 Subject: [PATCH 24/26] Use edge TLS termination Quarkus < 3.x doesn't support some of the route options, we need to apply this YAML instead and disable auto route enable still todo, combine yaml's into one for easier oc apply -f or have quarkus apply the snippets for us --- k8s/stage/edgeroute.yml | 20 ++++++++++++++++++++ k8s/{ => stage}/kerberos-config.yaml | 0 src/main/resources/application.properties | 9 +++++++-- 3 files changed, 27 insertions(+), 2 deletions(-) create mode 100644 k8s/stage/edgeroute.yml rename k8s/{ => stage}/kerberos-config.yaml (100%) diff --git a/k8s/stage/edgeroute.yml b/k8s/stage/edgeroute.yml new file mode 100644 index 0000000..b7b22b0 --- /dev/null +++ b/k8s/stage/edgeroute.yml @@ -0,0 +1,20 @@ +apiVersion: route.openshift.io/v1 +kind: Route +metadata: + creationTimestamp: null + labels: + app.kubernetes.io/name: osh + app.kubernetes.io/version: 1.0.0-SNAPSHOT + app.openshift.io/runtime: quarkus + env: stage + name: osh +spec: + port: + targetPort: http + tls: + termination: edge + to: + kind: "" + name: osh + weight: null +status: {} diff --git a/k8s/kerberos-config.yaml b/k8s/stage/kerberos-config.yaml similarity index 100% rename from k8s/kerberos-config.yaml rename to k8s/stage/kerberos-config.yaml diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index cfe76d7..5edad7b 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -12,8 +12,14 @@ %stage.quarkus.openshift.name=osh %stage.quarkus.openshift.labels.env=stage +%stage.quarkus.log.level=DEBUG + +#Only in Quarkus > 3.x %stage.quarkus.openshift.route.tls.termination=edge -%stage.quarkus.openshift.route.expose=true +#As we cant create a edge terminated route (quarkus <3.x) lets disable route creation for now +%stage.quarkus.openshift.route.expose=false +%stage.quarkus.openshift.route.target-port=https +%stage.quarkus.openshift.route.tls.insecure-edge-termination-policy=redirect ########################################## # Kerberos Specifics # @@ -31,5 +37,4 @@ %stage.quarkus.openshift.mounts.osh-wrapper-config-vol.read-only=true -%stage.quarkus.log.level=DEBUG From 
fa4ea264e2ca02331b93632c9dcc75be5071e3d6 Mon Sep 17 00:00:00 2001 From: Jonathan Christison Date: Wed, 14 Jun 2023 14:37:37 +0100 Subject: [PATCH 25/26] Add a comment on how the file was created --- k8s/stage/edgeroute.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/k8s/stage/edgeroute.yml b/k8s/stage/edgeroute.yml index b7b22b0..735c1a0 100644 --- a/k8s/stage/edgeroute.yml +++ b/k8s/stage/edgeroute.yml @@ -1,3 +1,4 @@ +#oc create route edge --service=osh --dry-run=client -o yaml > edgeroute.yml apiVersion: route.openshift.io/v1 kind: Route metadata: From 1c1007b811e47d9605310f7e60554146d6e102ac Mon Sep 17 00:00:00 2001 From: Nicholas Caughey Date: Thu, 15 Jun 2023 16:59:59 +0100 Subject: [PATCH 26/26] changing the groupid to be associated with the project --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 0f4a34f..45de826 100644 --- a/pom.xml +++ b/pom.xml @@ -9,7 +9,7 @@ 4.0.0 - com.redhat.ncaughey + com.redhat.pctOshWrapper osh 1.0.0-SNAPSHOT
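
A note on the JDBC INSERT strings carried through the dto classes and CreateStartScan above: the placeholders are written space-separated ("VALUES (? ? ? ? ?)"), which PostgreSQL will reject with a syntax error once the statement reaches the server. The sketch below is not part of any patch in this series; it shows the gitscans insert from GitObj/CreateScanRequest rewritten with comma-separated placeholders. The JDBC URL and credentials here are placeholders (the service reads its own from constants.PSGQL), and the unqualified table name matches GitObj.SQL on the assumption that the connection's search_path resolves to the covscanrest schema created by schema/schema.sql.

// Sketch only, not taken from the patches above.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class GitScanInsertSketch {

    // Placeholders must be comma-separated for the statement to parse;
    // "VALUES (? ? ? ?)" as written in GitObj.SQL is not valid SQL.
    private static final String INSERT_GITSCAN =
            "INSERT INTO gitscans "
          + "(build_system_type, repository, reference, commit_id) "
          + "VALUES (?, ?, ?, ?)";

    public static void main(String[] args) throws SQLException {
        // Placeholder connection settings, not values from this repository.
        String url = "jdbc:postgresql://localhost:5432/postgres";
        try (Connection conn = DriverManager.getConnection(url, "postgres", "postgres");
             PreparedStatement pstmt = conn.prepareStatement(INSERT_GITSCAN)) {
            // The values mirror the fields GitObjPayload reads from the request JSON.
            pstmt.setString(1, "git");                                      // build_system_type
            pstmt.setString(2, "https://example.com/some/repo.git");        // repository
            pstmt.setString(3, "main");                                     // reference
            pstmt.setString(4, "c6385a754421a57cd0a26ccba187cd687c8d1258"); // commit_id
            pstmt.executeUpdate();
        }
    }
}

The same comma fix would apply to the BrewObj, PncObj and ScanObj constants and to the archive insert in CreateStartScan; the surrounding try-with-resources handling, setString calls and the SLF4J logging added in PATCH 16 can stay as they are in the patches.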