diff --git a/README.md b/README.md index 3245b14..dab2586 100644 --- a/README.md +++ b/README.md @@ -2,3 +2,9 @@ Originally written in the days of trying to do something, with no budget, I wrote this out of necessity. I subsequently got permission to open-source it so long as there were no references to the company in it. So this is the cleaned up version, with a few additional features added and the things that turned out to be pointless removed. It's probably the smallest functional ETL application with decent performance. Since I only use it on Postgres nowadays, it only officially supports Postgres at the moment. But in the near past it's worked pulling data from "several commercial databases" that don't like being named in benchmarks etc. and if you have the JDBC jars in your classpath then it should just work. + +For an example config file, please see [TEST_config_live.xml](https://github.com/rasilon/ujetl/blob/master/src/test/resources/TEST_config_live.xml) + +To run the dockerised integration tests, use `build_util/run_docker_tests` in this repo. + +A runnable docker image is available at [rasilon/ujetl](https://cloud.docker.com/repository/docker/rasilon/ujetl). This expects config files copied into, or mounted into `/var/ujetl/`. RPMs can be built using `build_util/build_rpms_in_docker`. As the name suggests, you need docker for that. 
diff --git a/build_util/build_in_docker b/build_util/build_in_docker deleted file mode 100755 index 72d8b76..0000000 --- a/build_util/build_in_docker +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash -set -e - -docker build --rm -t local/c7-buildhost docker - -docker run -it -v `pwd`:/root/build local/c7-buildhost /root/build/build_util/build_rpm diff --git a/build_util/build_rpm b/build_util/build_rpm deleted file mode 100755 index 769915f..0000000 --- a/build_util/build_rpm +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -e - -cd /root -cp -Rv build build2 -cd build2 - -SPEC=$(ls *.spec) -VER=$(grep Version $SPEC | awk '{print $2}') -tar cvf $HOME/rpmbuild/SOURCES/uJETL-${VER}.tar.gz --show-transformed --transform="s/^\./uJETL-${VER}/" . -rpmbuild -ba $SPEC -cp /root/rpmbuild/RPMS/x86_64/* /root/build/ diff --git a/build_util/create_run_docker b/build_util/create_run_docker new file mode 100755 index 0000000..864fdf6 --- /dev/null +++ b/build_util/create_run_docker @@ -0,0 +1,3 @@ +#!/bin/bash +docker build --target deploy -t rasilon/ujetl docker/multistage +docker tag rasilon/ujetl:latest rasilon/ujetl:$(xpath -q -e '/project/version/text()' pom.xml) diff --git a/build_util/push_docker_images b/build_util/push_docker_images new file mode 100755 index 0000000..14afd64 --- /dev/null +++ b/build_util/push_docker_images @@ -0,0 +1,4 @@ +#!/bin/bash +docker push rasilon/ujetl:latest +docker push rasilon/ujetl:$(xpath -q -e '/project/version/text()' pom.xml) + diff --git a/build_util/run_docker_tests b/build_util/run_docker_tests new file mode 100755 index 0000000..b8317c8 --- /dev/null +++ b/build_util/run_docker_tests @@ -0,0 +1,6 @@ +#!/bin/bash + +docker build --target tester -t rasilon/ujetl_tester docker/multistage +docker-compose -f docker/test_compose/docker-compose.yml run --rm tests +docker-compose -f docker/test_compose/docker-compose.yml down + diff --git a/config_util/ujetl_insert_generator.sql b/config_util/ujetl_insert_generator.sql new file mode 100644 index 
0000000..a7375f6 --- /dev/null +++ b/config_util/ujetl_insert_generator.sql @@ -0,0 +1,76 @@ +CREATE OR REPLACE FUNCTION pg_temp.ujetl_insert(sch text, tabname text) + RETURNS text + LANGUAGE plpgsql +AS $function$ +declare + s text := ''; + header text := ''; + col_list text := ''; + vals text := ''; + sets text := ''; + changes text := ''; + is_first boolean := true; + colinfo record; + pks text; +begin + SELECT + array_to_string(array_agg(quote_ident(pg_attribute.attname::text) ),', ') into pks + FROM + pg_index, + pg_class, + pg_attribute, + pg_namespace + WHERE + pg_class.relname = tabname + AND indrelid = pg_class.oid + AND nspname = sch + AND pg_class.relnamespace = pg_namespace.oid + AND pg_attribute.attrelid = pg_class.oid + AND pg_attribute.attnum = any(pg_index.indkey) + AND indisprimary ; + + header := E'INSERT INTO '||quote_ident(sch)||'.'||quote_ident(tabname)||E' as t (\n '; + for colinfo in + select + * + from + information_schema.columns + where + table_schema = sch + and table_name = tabname + order by ordinal_position + loop + if not is_first then + col_list := col_list || E',\n '; + vals := vals || E',\n '; + sets := sets || E',\n '; + changes := changes || E'\n OR '; + end if; + col_list := col_list || quote_ident(colinfo.column_name); + vals := vals || '?::' || colinfo.data_type; + sets := sets || quote_ident(colinfo.column_name) || + E' = EXCLUDED.' || quote_ident(colinfo.column_name); + changes := changes || E't.' || quote_ident(colinfo.column_name) || + E' IS DISTINCT FROM EXCLUDED.' 
|| quote_ident(colinfo.column_name); + + is_first = false; + end loop; + + s := coalesce(header,'header failed') || + coalesce(col_list,'col_list failed') || + E'\n)VALUES(\n ' || + coalesce(vals,'vals failed') || + E')\nON CONFLICT(' || coalesce(pks,'No primary keys found') || E') DO UPDATE\nSET\n ' || + coalesce(sets,'sets failed') || + E'\nWHERE\n '|| + coalesce(changes,'changes failed'); + return s; +end; +$function$ +; + + + + + + diff --git a/config_util/ujetl_select_generator.sql b/config_util/ujetl_select_generator.sql new file mode 100644 index 0000000..5c475af --- /dev/null +++ b/config_util/ujetl_select_generator.sql @@ -0,0 +1,65 @@ +CREATE OR REPLACE FUNCTION pg_temp.ujetl_select(sch text, tabname text) + RETURNS text + LANGUAGE plpgsql +AS $function$ +declare + s text := ''; + header text := ''; + col_list text := ''; + vals text := ''; + sets text := ''; + changes text := ''; + is_first boolean := true; + colinfo record; + pks text; +begin + SELECT + array_to_string(array_agg(quote_ident(pg_attribute.attname::text) ),', ') into pks + FROM + pg_index, + pg_class, + pg_attribute, + pg_namespace + WHERE + pg_class.relname = tabname + AND indrelid = pg_class.oid + AND nspname = sch + AND pg_class.relnamespace = pg_namespace.oid + AND pg_attribute.attrelid = pg_class.oid + AND pg_attribute.attnum = any(pg_index.indkey) + AND indisprimary ; + + header := E'SELECT\n '; + for colinfo in + select + * + from + information_schema.columns + where + table_schema = sch + and table_name = tabname + order by ordinal_position + loop + if not is_first then + col_list := col_list || E',\n '; + end if; + col_list := col_list || quote_ident(colinfo.column_name); + + is_first = false; + end loop; + + s := header || + coalesce(col_list,'col_list failed') || + E'\nFROM\n ' || + quote_ident(sch)||'.'||quote_ident(tabname)||E' as t \n '|| + E'WHERE\n insert criteria here >= ?::datatype'; + return s; +end; +$function$ +; + + + + + + diff --git a/docker/Dockerfile 
b/docker/build/Dockerfile similarity index 93% rename from docker/Dockerfile rename to docker/build/Dockerfile index 207828b..c780b8f 100644 --- a/docker/Dockerfile +++ b/docker/build/Dockerfile @@ -3,7 +3,7 @@ # VERSION 0.0.1 FROM centos:centos7 -MAINTAINER Derry Hamilton +MAINTAINER Derry Hamilton # Install up-to-date epel rpm repository RUN yum -y install epel-release diff --git a/docker/multistage/Dockerfile b/docker/multistage/Dockerfile new file mode 100644 index 0000000..dfc23a9 --- /dev/null +++ b/docker/multistage/Dockerfile @@ -0,0 +1,27 @@ +FROM ubuntu:22.04 as builder +RUN apt-get update && apt-get -y upgrade +RUN apt-get -y install openjdk-19-jdk-headless maven git +RUN git clone --single-branch --branch main https://github.com/rasilon/ujetl.git +RUN cd ujetl && mvn -e package + +FROM openjdk:11 as runner +LABEL maintainer="Derry Hamilton " + +RUN apt update && apt upgrade -y && apt install -y bash + +RUN mkdir -p /usr/share/ujetl/lib/ /var/ujetl /etc/ujetl + +COPY --from=builder /ujetl/target/CopyingApp-2.*-jar-with-dependencies.jar /usr/share/ujetl/lib/CopyingApp.jar +COPY --from=builder /ujetl/install_extra/copying_defaults_log4j.xml /etc/ujetl/ +COPY ujetl_entrypoint / +CMD ["/ujetl_entrypoint"] + + +FROM runner as tester +COPY TEST_config_live.xml /var/ujetl/ +COPY wait_for_postgres / +RUN apt-get install -y postgresql-client + + +FROM runner as deploy +# Convince docker cloud to build the deploy image diff --git a/docker/multistage/TEST_config_live.xml b/docker/multistage/TEST_config_live.xml new file mode 100644 index 0000000..d1b7345 --- /dev/null +++ b/docker/multistage/TEST_config_live.xml @@ -0,0 +1,167 @@ + + + 360000 + 10000 + 1000 + 500 + + org.postgresql.Driver + org.relique.jdbc.csv.CsvDriver + + + + jdbc:postgresql://testdb:5432/test + test + test + 600000 + + + jdbc:postgresql://testdb:5432/test + test + test + + + + test + select 'PID:'||pg_backend_pid() + select 'PID:'||pg_backend_pid() + select coalesce(max(id),-1) as key from 
dest + + + insert into public.dest( + id, + test_int, + test_text, + test_ts + )values( + ?::bigint, + ?::integer, + ?::text, + ?::timestamp with time zone + )ON CONFLICT(id) DO UPDATE + set + test_int = EXCLUDED.test_int, + test_text = EXCLUDED.test_text, + test_ts = EXCLUDED.test_ts + WHERE + dest.test_int = EXCLUDED.test_int + OR dest.test_text = EXCLUDED.test_text + OR dest.test_ts = EXCLUDED.test_ts + + + + test upsert + select 'PID:'||pg_backend_pid() + select 'PID:'||pg_backend_pid() + select -1 as key + + + insert into public.dest( + id, + test_int, + test_text, + test_ts + )values( + ?::bigint, + ?::integer, + ?::text, + ?::timestamp with time zone + )ON CONFLICT(id) DO UPDATE + set + test_int = EXCLUDED.test_int, + test_text = EXCLUDED.test_text, + test_ts = EXCLUDED.test_ts + WHERE + dest.test_int IS DISTINCT FROM EXCLUDED.test_int + OR dest.test_text IS DISTINCT FROM EXCLUDED.test_text + OR dest.test_ts IS DISTINCT FROM EXCLUDED.test_ts + + + + denormalise + select 'PID:'||pg_backend_pid() + select 'PID:'||pg_backend_pid() + select -1 as key + + + INSERT INTO denormalised_personalia(person_id,fname,lname) + values(?::integer,?::text,?::text) + ON CONFLICT (person_id) DO UPDATE + SET + fname = EXCLUDED.fname, + lname = EXCLUDED.lname + WHERE + denormalised_personalia.fname is distinct from EXCLUDED.fname + OR denormalised_personalia.lname is distinct from EXCLUDED.lname + + + + test pre post + select -1 as key + + + drop table if exists tmp_dest; + create temp table tmp_dest( + id bigint, + test_int integer, + test_text text, + test_ts timestamp with time zone + ); + + + insert into tmp_dest( + id, + test_int, + test_text, + test_ts + )values( + ?::bigint, + ?::integer, + ?::text, + ?::timestamp with time zone + ) + + + insert into public.dest( + id, + test_int, + test_text, + test_ts + ) + select id,test_int,test_text,test_ts + from tmp_dest + ON CONFLICT(id) DO UPDATE + set + test_int = EXCLUDED.test_int, + test_text = EXCLUDED.test_text, + test_ts = 
EXCLUDED.test_ts + WHERE + dest.test_int IS DISTINCT FROM EXCLUDED.test_int + OR dest.test_text IS DISTINCT FROM EXCLUDED.test_text + OR dest.test_ts IS DISTINCT FROM EXCLUDED.test_ts + + + + diff --git a/docker/multistage/small.csv b/docker/multistage/small.csv new file mode 100644 index 0000000..37ddeff --- /dev/null +++ b/docker/multistage/small.csv @@ -0,0 +1,4 @@ +id,dat +1,banana +2,potato +3,nugget diff --git a/docker/multistage/ujetl_entrypoint b/docker/multistage/ujetl_entrypoint new file mode 100755 index 0000000..03870df --- /dev/null +++ b/docker/multistage/ujetl_entrypoint @@ -0,0 +1,22 @@ +#!/bin/bash +set -e + +LOG_PROPS=/etc/ujetl/copying_defaults_log4j.xml + +cd /var/ujetl +echo Currently in `pwd` +echo processing files: +ls +echo Starting run loop +for file in *.xml +do + /usr/local/openjdk-11/bin/java \ + -Xms1g \ + -Xmx2g \ + -cp /usr/share/ujetl/lib/CopyingApp.jar \ + -Dlog4j.configurationFile="$LOG_PROPS" \ + com.rasilon.ujetl.CopyingApp \ + --log4j "$LOG_PROPS" \ + --config "$file" +done + diff --git a/docker/multistage/wait_for_postgres b/docker/multistage/wait_for_postgres new file mode 100755 index 0000000..9676149 --- /dev/null +++ b/docker/multistage/wait_for_postgres @@ -0,0 +1,21 @@ +#!/bin/bash +set -e + +cmd="$@" + +until PGPASSWORD=test psql -h "testdb" -U "test" -c 'SELECT 1 FROM public.container_ready' postgres; do + >&2 echo "Postgres is unavailable - sleeping" + sleep 1 +done + +>&2 echo "Postgres is up - Waiting for the reboot" +sleep 3 # Wait for the Postgres reboot at the end of setup + +until PGPASSWORD=test psql -h "testdb" -U "test" -c 'SELECT 1 FROM public.container_ready' postgres; do + >&2 echo "Postgres is unavailable - sleeping" + sleep 1 +done + + +>&2 echo "Postgres is up - executing command" +exec $cmd diff --git a/docker/test_compose/docker-compose.yml b/docker/test_compose/docker-compose.yml new file mode 100644 index 0000000..91bd3d9 --- /dev/null +++ b/docker/test_compose/docker-compose.yml @@ -0,0 +1,23 @@ +# 
This is a sample to help put the full application together + +version: '3.3' + +services: + testdb: + image: rasilon/ujetl_testdb:latest + build: + context: ../test_db + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: password + POSTGRES_DB: postgres + tests: + image: rasilon/ujetl_tester:latest + build: + context: ../multistage + links: + - "testdb" + command: ["/wait_for_postgres", "/ujetl_entrypoint"] + + + diff --git a/docker/test_db/Dockerfile b/docker/test_db/Dockerfile new file mode 100644 index 0000000..868ee9f --- /dev/null +++ b/docker/test_db/Dockerfile @@ -0,0 +1,3 @@ +FROM postgres:11 +COPY setup.sql /docker-entrypoint-initdb.d/ +COPY is_ready / diff --git a/docker/test_db/is_ready b/docker/test_db/is_ready new file mode 100644 index 0000000..a92c259 --- /dev/null +++ b/docker/test_db/is_ready @@ -0,0 +1,3 @@ +#!/bin/bash +/usr/lib/postgresql/9.6/bin/psql -U postgres -c "SELECT 1 FROM public.container_ready" postgres + diff --git a/docker/test_db/setup.sql b/docker/test_db/setup.sql new file mode 100644 index 0000000..2469967 --- /dev/null +++ b/docker/test_db/setup.sql @@ -0,0 +1,58 @@ +CREATE DATABASE test; +\c test +CREATE ROLE test login password 'test'; +CREATE UNLOGGED TABLE source ( + id bigserial primary key, + test_int integer, + test_text text, + test_ts timestamp with time zone +); +CREATE UNLOGGED TABLE dest ( + id bigint primary key, + test_int integer, + test_text text, + test_ts timestamp with time zone +); + + +GRANT SELECT ON source to test; +GRANT SELECT,INSERT,UPDATE,DELETE ON dest TO test; + +INSERT INTO source(test_int,test_text,test_ts) SELECT 1,'banana',now() FROM generate_series(1,100000); + +CREATE TABLE normalised_first_names( + fid smallserial not null primary key, + fname text not null unique +); +CREATE TABLE normalised_last_names( + lid smallserial not null primary key, + lname text not null unique +); +INSERT INTO normalised_first_names (fname) values ('Abigail'), ('Adam'), ('Beatrice'), ('Bruce'), ('Claire'), 
('Clive'), ('Deborah'), ('Dave'); +INSERT INTO normalised_last_names (lname) values ('Adams'), ('Bellamy'), ('Clark'), ('Dabrowski'); + +CREATE TABLE normalised_personalia ( + person_id serial not null primary key, + fid smallint not null references normalised_first_names(fid), + lid smallint not null references normalised_last_names(lid) +); +insert into normalised_personalia(fid,lid) values (1,1), (1,2), (1,3), (1,4), (2,1), (2,2), (2,3), (2,4), (3,1), (3,2), (3,3), (3,4), (4,1), (4,2), (4,3), (4,4); + +CREATE TABLE denormalised_personalia( + person_id integer not null primary key, + fname text, + lname text +); + +CREATE TABLE test_csvjdbc( + id integer not null primary key, + dat text +); + +GRANT SELECT ON ALL TABLES IN SCHEMA public TO test; +GRANT SELECT,INSERT,UPDATE ON denormalised_personalia TO test; + +\c postgres +CREATE TABLE public.container_ready AS SELECT 1 FROM(VALUES(1)) AS a(a); +GRANT SELECT ON public.container_ready TO TEST; + diff --git a/install_extra/run_copying_job b/install_extra/run_copying_job index bf51414..be923bd 100755 --- a/install_extra/run_copying_job +++ b/install_extra/run_copying_job @@ -30,9 +30,9 @@ fi /usr/bin/java \ -Xms1g \ -Xmx2g \ + -Dlog4j.configurationFile="$LOG_PROPS" \ -cp /usr/share/ujetl/lib/CopyingApp.jar \ com.rasilon.ujetl.CopyingApp \ - --log4j "$LOG_PROPS" \ --config "/etc/ujetl/${JOBNAME}_config_live.xml" #rm -f $LOCKFILE diff --git a/pom.xml b/pom.xml index 945e0cd..6970fdb 100644 --- a/pom.xml +++ b/pom.xml @@ -6,19 +6,42 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/ma com.rasilon.ujetl CopyingApp jar - 2.0.1 + 2.5.2 uJETL - + https://github.com/rasilon/ujetl + + + UTF-8 + - junit - junit - 4.12 + org.junit.jupiter + junit-jupiter-api + 5.4.2 + test + + + org.junit.jupiter + junit-jupiter-engine + 5.4.2 + test + + + org.junit.vintage + junit-vintage-engine + 5.4.2 + test + + + com.h2database + h2 + 2.2.220 + test org.apache.commons commons-lang3 - 3.9 + 3.18.0 
commons-logging @@ -28,13 +51,12 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/ma org.apache.commons commons-configuration2 - 2.4 + 2.10.1 - commons-beanutils commons-beanutils - 1.9.3 + 1.11.0 com.beust @@ -44,27 +66,31 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/ma org.apache.logging.log4j log4j-api - 2.11.2 + 2.17.1 org.apache.logging.log4j log4j-core - 2.11.2 + 2.25.3 org.postgresql postgresql - 42.2.5 + 42.7.2 + + + net.sourceforge.csvjdbc + csvjdbc + 1.0.40 maven-compiler-plugin - 2.3.2 + 3.8.0 - 1.8 - 1.8 + 11 @@ -88,6 +114,10 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/ma + + maven-surefire-plugin + 2.22.0 + diff --git a/src/main/java/com/rasilon/ujetl/CopyingApp.java b/src/main/java/com/rasilon/ujetl/CopyingApp.java index 6938888..fa830c8 100644 --- a/src/main/java/com/rasilon/ujetl/CopyingApp.java +++ b/src/main/java/com/rasilon/ujetl/CopyingApp.java @@ -34,10 +34,6 @@ public class CopyingApp { public static void main(String[] args) { CopyingAppCommandParser cli = new CopyingAppCommandParser(args); LoggerContext context = (org.apache.logging.log4j.core.LoggerContext) LogManager.getContext(false); - String log4jConfigLocation = cli.getLog4jConfigFile(); - File file = new File(log4jConfigLocation); - context.setConfigLocation(file.toURI()); - System.out.println("Config set from "+file.toURI()); CopyingApp app = new CopyingApp(cli); try { @@ -79,6 +75,7 @@ public class CopyingApp { Configuration config = configs.xml(cli.getConfigFile()); + loadDrivers(config); String hardLimitSeconds = config.getString("hardLimitSeconds"); if(hardLimitSeconds != null) { TimeLimiter hardLimit = new TimeLimiter(Integer.decode(hardLimitSeconds).intValue(),true); @@ -108,14 +105,14 @@ public class CopyingApp { log.info(String.format("%s - Setting Row count interval to default of 100 rows.",jobName)); } - Integer pollTimeout = null; - try { - pollTimeout = new 
Integer(config.getString("nRowsToLog")); - log.info(String.format("%s - Setting Poll timeout to %s milliseconds", jobName, pollTimeout)); - } catch(Exception e) { - pollTimeout = new Integer(1000); // If we don't have a new setting, use the old default - log.info(String.format("%s - Setting poll timeout to default of 1 second.",jobName)); - } + Integer pollTimeout = null; + try { + pollTimeout = new Integer(config.getString("pollTimeout")); + log.info(String.format("%s - Setting Poll timeout to %s milliseconds", jobName, pollTimeout)); + } catch(Exception e) { + pollTimeout = new Integer(1000); // If we don't have a new setting, use the old default + log.info(String.format("%s - Setting poll timeout to default of 1 second.",jobName)); + } @@ -131,7 +128,27 @@ public class CopyingApp { String tabKey = config.getString("jobs.job("+i+").key"); String tabSelect = config.getString("jobs.job("+i+").select"); String tabInsert = config.getString("jobs.job("+i+").insert"); - Job j = new Job(sConn,dConn,tabName,jobName,tabKey,tabSelect,tabInsert,nRowsToLog,blockSize,pollTimeout); + String preTarget = config.getString("jobs.job("+i+").preTarget"); + String postTarget = config.getString("jobs.job("+i+").postTarget"); + String identifySourceSQL = config.getString("jobs.job.identifySourceSQL"); + String identifyDestinationSQL = config.getString("jobs.job.identifyDestinationSQL"); + + Job j = new Job( + sConn, + dConn, + tabName, + jobName, + tabKey, + tabSelect, + tabInsert, + preTarget, + postTarget, + nRowsToLog, + blockSize, + pollTimeout, + identifySourceSQL, + identifyDestinationSQL + ); j.start(); j.join(); @@ -141,7 +158,28 @@ public class CopyingApp { String tabKey = config.getString("jobs.job.key"); String tabSelect = config.getString("jobs.job.select"); String tabInsert = config.getString("jobs.job.insert"); - Job j = new Job(sConn,dConn,tabName,jobName,tabKey,tabSelect,tabInsert,nRowsToLog,blockSize,pollTimeout); + String preTarget = 
config.getString("jobs.job.preTarget"); + String postTarget = config.getString("jobs.job.postTarget"); + String identifySourceSQL = config.getString("jobs.job.identifySourceSQL"); + String identifyDestinationSQL = config.getString("jobs.job.identifyDestinationSQL"); + + + Job j = new Job( + sConn, + dConn, + tabName, + jobName, + tabKey, + tabSelect, + tabInsert, + preTarget, + postTarget, + nRowsToLog, + blockSize, + pollTimeout, + identifySourceSQL, + identifyDestinationSQL + ); j.start(); j.join(); } else { @@ -203,4 +241,21 @@ public class CopyingApp { return c; } + + // Even with JDBC 4, some drivers don't play nicely with whatever + // the classloaders are up to. So this allows us to force it the + // old fashioned way, and works around the + // "But it works fine when it's the /only/ driver!" + // cross-database problem + private void loadDrivers(Configuration config) { + String[] drivers = config.get(String[].class, "drivers.driver"); + for(String d:drivers) { + try { + Class.forName(d); + log.info("Preloaded driver "+d); + } catch(ClassNotFoundException e) { + log.error("Could not preload driver "+d,e); + } + } + } } diff --git a/src/main/java/com/rasilon/ujetl/CopyingAppCommandParser.java b/src/main/java/com/rasilon/ujetl/CopyingAppCommandParser.java index d0d4d73..97d2dc8 100644 --- a/src/main/java/com/rasilon/ujetl/CopyingAppCommandParser.java +++ b/src/main/java/com/rasilon/ujetl/CopyingAppCommandParser.java @@ -12,7 +12,7 @@ public class CopyingAppCommandParser { private String configFile; @Parameter(names = {"-log4j","--log4j"}, description = "Log4J config file for this run") - private String log4jConfigFile = "/etc/ppl/default_log4j_config.properties"; + private String log4jConfigFile = "/etc/ujetl/default_log4j_config.properties"; public CopyingAppCommandParser(String[] args) { super(); @@ -23,8 +23,4 @@ public class CopyingAppCommandParser { return configFile; } - public String getLog4jConfigFile() { - return log4jConfigFile; - } - } diff --git 
a/src/main/java/com/rasilon/ujetl/Job.java b/src/main/java/com/rasilon/ujetl/Job.java index a083f56..ae680a4 100644 --- a/src/main/java/com/rasilon/ujetl/Job.java +++ b/src/main/java/com/rasilon/ujetl/Job.java @@ -22,23 +22,29 @@ import org.apache.logging.log4j.Logger; public class Job extends Thread { static Logger log = org.apache.logging.log4j.LogManager.getLogger(Job.class); - Connection sConn; - Connection dConn; - String name; - String jobName; - String key; - String select; - String insert; - Integer nRowsToLog; - Integer blockSize; - Integer pollTimeout; + private Connection sConn; + private Connection dConn; + private String name; + private String jobName; + private String key; + private String select; + private String insert; + private String preTarget; + private String postTarget; + private Integer nRowsToLog; + private Integer blockSize; + private Integer pollTimeout; + private String identifySourceSQL; + private String identifyDestinationSQL; - BlockingQueue> resultBuffer; - AtomicBoolean producerLive; - AtomicBoolean threadsExit = new AtomicBoolean(false);; + private BlockingQueue> resultBuffer; + private AtomicBoolean producerLive; + private AtomicBoolean threadsExit = new AtomicBoolean(false);; + private String sourceID; + private String destID; - public Job(Connection sConn,Connection dConn,String name,String jobName,String key,String select,String insert,Integer nRowsToLog,Integer blockSize,Integer pollTimeout) { + public Job(Connection sConn,Connection dConn,String name,String jobName,String key,String select,String insert,String preTarget,String postTarget,Integer nRowsToLog,Integer blockSize,Integer pollTimeout,String identifySourceSQL, String identifyDestinationSQL) { this.sConn = sConn; this.dConn = dConn; this.name = name; @@ -46,9 +52,13 @@ public class Job extends Thread { this.key = key; this.select = select; this.insert = insert; + this.preTarget = preTarget; + this.postTarget = postTarget; this.nRowsToLog = nRowsToLog; this.blockSize = 
blockSize; this.pollTimeout = pollTimeout; + this.identifySourceSQL = identifySourceSQL; + this.identifyDestinationSQL = identifyDestinationSQL; resultBuffer = new ArrayBlockingQueue>( 3 * blockSize); producerLive = new AtomicBoolean(true); @@ -72,12 +82,12 @@ public class Job extends Thread { public Producer(ResultSet src,BlockingQueue q) { this.src = src; this.q = q; - this.setName(String.format("%s-%s-Consumer",jobName,name)); + this.setName(String.format("%s-%s-Producer",jobName,name)); } public void run() { try { long rowsInserted = 0; - long rowNum = 0; + long rowsAttempted = 0; long stamp = System.nanoTime(); long nstamp; int columnCount = src.getMetaData().getColumnCount(); @@ -95,13 +105,13 @@ public class Job extends Thread { } log.trace("Producer queue full."); } - rowNum++; - if(rowNum % nRowsToLog == 0) { - log.info(String.format("%s - Queued %s rows for %s so far",jobName,rowNum,name)); + rowsAttempted++; + if(rowsAttempted % nRowsToLog == 0) { + log.info(String.format("%s - Queued %s rows for %s so far",jobName,rowsAttempted,name)); } } producerLive.set(false); - log.info(String.format("%s - Queued a total of %s rows for %s",jobName,rowNum,name)); + log.info(String.format("%s - Queued a total of %s rows for %s",jobName,rowsAttempted,name)); } catch(Exception e) { producerLive.set(false); // Signal we've exited. threadsExit.set(true); // Signal we've exited. 
@@ -122,7 +132,7 @@ public class Job extends Thread { } public void run() { try { - long rowNum = 0; + long rowsAttempted = 0; long rowsInserted = 0; while(true) { @@ -133,7 +143,7 @@ public class Job extends Thread { if(row == null && producerLive.get() == false) { rowsInserted += arraySum(insertStatement.executeBatch()); dConn.commit(); - log.info(String.format("%s - Inserted a total of %s of %s notified rows into %s",jobName,rowNum,rowsInserted,name)); + log.info(String.format("%s - Inserted a total of %s of %s notified rows into %s",jobName,rowsInserted,rowsAttempted,name)); return; } if(threadsExit.get()) { @@ -150,14 +160,14 @@ public class Job extends Thread { } insertStatement.addBatch(); - rowNum++; - if(rowNum % nRowsToLog == 0) { + rowsAttempted++; + if(rowsAttempted % nRowsToLog == 0) { rowsInserted += arraySum(insertStatement.executeBatch()); dConn.commit(); - log.info(String.format("%s - Inserted %s of %s notified rows into %s so far", + log.info(String.format("%s - Inserted %s of %s notified rows into %s", jobName, - rowNum, rowsInserted, + rowsAttempted, name)); } } @@ -169,11 +179,34 @@ public class Job extends Thread { } } + // Outer run public void run() { try { ResultSet rs; - log.info(String.format("%s - Processing table: %s",jobName,name)); + if(identifySourceSQL != null) sourceID = getSingleString(identifySourceSQL,sConn); + if(identifyDestinationSQL != null) destID = getSingleString(identifyDestinationSQL,dConn); + + if(sourceID != null || destID != null){ + log.info(String.format( + "%s - Processing table: %s with source: %s, dest: %s", + jobName, + name, + sourceID==null?"":sourceID, + destID==null?"":destID + )); + }else{ + log.info(String.format("%s - Processing table: %s",jobName,name)); + } + if(preTarget != null){ + log.info(String.format("%s - Trying to execute preTarget SQL",jobName)); + PreparedStatement s = dConn.prepareStatement(preTarget); + s.executeUpdate(); + s.close(); + dConn.commit(); + }else{ + log.info(String.format("%s 
- No preTarget; skipping.",jobName)); + } log.debug("Trying to execute: "+key); PreparedStatement keyStatement = dConn.prepareStatement(key); @@ -211,10 +244,33 @@ public class Job extends Thread { p.join(); c.join(); + if(postTarget != null){ + log.info(String.format("%s - Trying to execute postTarget SQL",jobName)); + PreparedStatement s = dConn.prepareStatement(postTarget); + s.executeUpdate(); + s.close(); + dConn.commit(); + }else{ + log.info(String.format("%s - No postTarget; skipping.",jobName)); + } + + } catch(InterruptedException e) { throw new RuntimeException(e); } catch(SQLException e) { throw new RuntimeException(e); } } + + private String getSingleString(String sql, Connection conn){ + try{ + PreparedStatement s = conn.prepareStatement(sql); + ResultSet r = s.executeQuery(); + r.next(); + return r.getString(1); + } catch(SQLException e) { + throw new RuntimeException(e); + } + } + } diff --git a/src/test/java/com/rasilon/ujetl/TestConfig.java b/src/test/java/com/rasilon/ujetl/TestConfig.java new file mode 100644 index 0000000..05f9d82 --- /dev/null +++ b/src/test/java/com/rasilon/ujetl/TestConfig.java @@ -0,0 +1,37 @@ +package com.rasilon.ujetl; + +import org.apache.commons.configuration2.Configuration; +import org.apache.commons.configuration2.builder.fluent.Configurations; +import org.apache.commons.configuration2.ex.ConfigurationException; + +import org.apache.commons.beanutils.PropertyUtils; // Why does config need this? 
+ +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.MethodOrderer.Alphanumeric; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + + +/** + * @author derryh + * + */ +public class TestConfig { + + @Test + public void test001VerifyArrayOfDrivers() { + try { + Configurations configs = new Configurations(); + Configuration config = configs.xml("TEST_config_live.xml"); + String[] drivers = config.get(String[].class, "drivers.driver"); + int ndrivers =drivers.length; + if(ndrivers != 3){ + fail("Expected 3 drivers, but found "+ndrivers); + } + } catch(Exception e) { + fail(e.toString()); + } + } + +} diff --git a/src/test/java/com/rasilon/ujetl/TestJob.java b/src/test/java/com/rasilon/ujetl/TestJob.java new file mode 100644 index 0000000..63ab4c8 --- /dev/null +++ b/src/test/java/com/rasilon/ujetl/TestJob.java @@ -0,0 +1,64 @@ +package com.rasilon.ujetl; + +import java.sql.*; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.MethodOrderer.Alphanumeric; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + + +public class TestJob { + + private static String jdbcURL = "jdbc:h2:mem:dbtest"; + @Test + public void test002verifyH2Works() { + try { + Connection conn = DriverManager.getConnection(jdbcURL, "sa", ""); + conn.close(); + } catch(Exception e) { + fail(e.toString()); + } + } + + @Test + public void testJob() { + try ( + Connection src = DriverManager.getConnection(jdbcURL, "sa", ""); + Connection dest = DriverManager.getConnection(jdbcURL, "sa", ""); + + ) { + src.createStatement().executeUpdate("CREATE TABLE src(id bigint not null primary key, dat varchar);"); + dest.createStatement().executeUpdate("CREATE TABLE dest(id bigint not null primary key, dat varchar);"); + PreparedStatement inserter = src.prepareStatement("INSERT 
INTO src(id,dat) VALUES(?,'banana')"); + for(int i=0; i<10000; i++) { + inserter.setInt(1,i); + inserter.executeUpdate(); + } + + Job j = new Job( + src, + dest, + "jUnit Test Config", + "jUnit Test Job", + "SELECT -1 AS \"key\"", + "SELECT id,dat FROM src WHERE id > ?", + "INSERT INTO dest VALUES(?,?)", + null, + null, + 100, + 100, + 100, + "select 'PID:'||session_id()", + "select 'PID:'||session_id()" + ); + j.start(); + j.join(); + // do stuff + } catch(Exception e) { + e.printStackTrace(); + fail(e.toString()); + } + } +} diff --git a/src/test/java/com/rasilon/ujetl/TestParser.java b/src/test/java/com/rasilon/ujetl/TestParser.java new file mode 100644 index 0000000..398f046 --- /dev/null +++ b/src/test/java/com/rasilon/ujetl/TestParser.java @@ -0,0 +1,27 @@ +package com.rasilon.ujetl; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.MethodOrderer.Alphanumeric; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + + +public class TestParser { + + @Test + public void test001Parset() { + try { + String[] args = { + "--config", + "config_test_banana.xml" + }; + CopyingAppCommandParser p = new CopyingAppCommandParser(args); + + assertEquals(p.getConfigFile(),"config_test_banana.xml"); + + } catch(Exception e) { + fail(e.toString()); + } + } +} diff --git a/src/test/java/com/rasilon/ujetl/TestPrePost.java b/src/test/java/com/rasilon/ujetl/TestPrePost.java new file mode 100644 index 0000000..55f3ee1 --- /dev/null +++ b/src/test/java/com/rasilon/ujetl/TestPrePost.java @@ -0,0 +1,64 @@ +package com.rasilon.ujetl; + +import java.sql.*; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.MethodOrderer.Alphanumeric; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + + +public class TestPrePost { + + private static String jdbcURL = 
"jdbc:h2:mem:dbtest"; + @Test + public void test002verifyH2Works() { + try { + Connection conn = DriverManager.getConnection(jdbcURL, "sa", ""); + conn.close(); + } catch(Exception e) { + fail(e.toString()); + } + } + + @Test + public void testPrePost() { + try ( + Connection src = DriverManager.getConnection(jdbcURL, "sa", ""); + Connection dest = DriverManager.getConnection(jdbcURL, "sa", ""); + + ) { + src.createStatement().executeUpdate("CREATE TABLE src(id bigint not null primary key, dat varchar);"); + dest.createStatement().executeUpdate("CREATE TABLE dest(id bigint not null primary key, dat varchar);"); + PreparedStatement inserter = src.prepareStatement("INSERT INTO src(id,dat) VALUES(?,'banana')"); + for(int i=0; i<10000; i++) { + inserter.setInt(1,i); + inserter.executeUpdate(); + } + + Job j = new Job( + src, + dest, + "jUnit Test Config", + "jUnit Test Job", + "SELECT -1 AS \"key\"", + "SELECT id,dat FROM src WHERE id > ?", + "INSERT INTO tmp_dest VALUES(?,?)", + "CREATE TEMP TABLE tmp_dest(id bigint not null primary key, dat varchar);", + "INSERT INTO dest SELECT * from tmp_dest;", + 100, + 100, + 100, + "select 'PID:'||session_id()", + "select 'PID:'||session_id()" + ); + j.start(); + j.join(); + // do stuff + } catch(Exception e) { + e.printStackTrace(); + fail(e.toString()); + } + } +} diff --git a/src/test/java/com/rasilon/ujetl/TestTimeLimiter.java b/src/test/java/com/rasilon/ujetl/TestTimeLimiter.java new file mode 100644 index 0000000..db12d51 --- /dev/null +++ b/src/test/java/com/rasilon/ujetl/TestTimeLimiter.java @@ -0,0 +1,28 @@ +package com.rasilon.ujetl; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.MethodOrderer.Alphanumeric; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + + +public class TestTimeLimiter { + + @Test + public void test001Limiter() { + try { + TimeLimiter hardLimit = new TimeLimiter(1,false); + 
hardLimit.start(); + + Thread.sleep(10000); + + fail("Sleep wasn't interrupted by the limiter!"); + } catch(java.lang.InterruptedException e) { + // Pass + } catch(Exception e) { + e.printStackTrace(); + fail("Unexpected exception."); + } + } +} diff --git a/src/test/resources/TEST_config_live.xml b/src/test/resources/TEST_config_live.xml index 90a65f5..210d8f1 100644 --- a/src/test/resources/TEST_config_live.xml +++ b/src/test/resources/TEST_config_live.xml @@ -3,6 +3,12 @@ 360000 10000 1000 + 500 + + org.postgresql.Driver + org.h2.Driver + org.relique.jdbc.csv.CsvDriver + jdbc:postgresql://localhost:5432/test test @@ -17,6 +23,8 @@ test + select 'PID:'||pg_backend_pid() + select 'PID:'||pg_backend_pid() select coalesce(-1,max(id),-1) as key from dest + select + id, + test_int, + test_text, + test_ts + from + public.source where id > ?::bigint + + insert into public.dest( + id, + test_int, + test_text, + test_ts + )values( + ?::bigint, + ?::integer, + ?::text, + ?::timestamp with time zone + )ON CONFLICT(id) DO UPDATE + set + test_int = EXCLUDED.test_int, + test_text = EXCLUDED.test_text, + test_ts = EXCLUDED.test_ts + WHERE + dest.test_int IS DISTINCT FROM EXCLUDED.test_int + OR dest.test_text IS DISTINCT FROM EXCLUDED.test_text + OR dest.test_ts IS DISTINCT FROM EXCLUDED.test_ts + + + + denormalise + select -1 as key + + + INSERT INTO denormalised_personalia(person_id,fname,lname) + values(?::integer,?::text,?::text) + ON CONFLICT (person_id) DO UPDATE + SET + fname = EXCLUDED.fname, + lname = EXCLUDED.lname + WHERE + denormalised_personalia.fname is distinct from EXCLUDED.fname + OR denormalised_personalia.lname is distinct from EXCLUDED.lname + + diff --git a/src/test/resources/log4j2.xml b/src/test/resources/log4j2.xml new file mode 100644 index 0000000..adeb7a4 --- /dev/null +++ b/src/test/resources/log4j2.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/uJETL.spec b/uJETL.spec deleted file mode 100644 index 45a6bc0..0000000 --- a/uJETL.spec +++ 
/dev/null @@ -1,33 +0,0 @@ -Summary: Java app to facilitate moving data between databases. -Name: uJETL -Version: 2.0.1 -Release: 1 -Group: Applications/Database -License: All rights reserved. -Source: uJETL-%{version}.tar.gz -URL: https://github.com/rasilon/ujetl.git -Distribution: derryh -Vendor: derryh -Packager: Derry Hamilton -#BuildRoot: . - -%description -A very small ETL app - -%prep -%setup - -%build -#mvn -Dmaven.test.skip=true clean package -true - -%install -mkdir -p $RPM_BUILD_ROOT/usr/share/ujetl/lib $RPM_BUILD_ROOT/etc/ujetl $RPM_BUILD_ROOT/usr/bin -cp target/CopyingApp-*-jar-with-dependencies.jar $RPM_BUILD_ROOT/usr/share/ujetl/lib/CopyingApp.jar -cp install_extra/run_copying_job $RPM_BUILD_ROOT/usr/bin -cp install_extra/copying_defaults_log4j.xml $RPM_BUILD_ROOT/etc/ujetl - -%files -/usr/share/ujetl/lib/CopyingApp.jar -/usr/bin/run_copying_job -/etc/ujetl/copying_defaults_log4j.xml