Merge pull request #259 from ThalesGroup/snowflake
Databricks initial checkin
Showing 9 changed files with 1,600 additions and 0 deletions.
@@ -0,0 +1,2 @@
# Integration with Databricks using User-Defined Functions
Includes both CADP and CRDP examples.
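A minimal sketch of how this UDF might be wired into a Databricks job once the jar-with-dependencies built by the pom.xml below is attached to a cluster. The class and package names come from this commit; the application name, the SQL table `customers`, the column `ssn`, and the wrapper class `RegisterThalesUdf` are illustrative assumptions only.

```java
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.api.java.UDF3;
import org.apache.spark.sql.types.DataTypes;

import example.ThalesDataBricksCADPFPE;

public class RegisterThalesUdf {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("thales-cadp-udf-demo").getOrCreate();

        // Expose the static helper as a Spark SQL function named thales_cadp_udf.
        spark.udf().register("thales_cadp_udf",
                (UDF3<String, String, String, String>) ThalesDataBricksCADPFPE::thales_cadp_udf,
                DataTypes.StringType);

        // Illustrative call: FPE-protect a numeric column while preserving its format.
        spark.sql("SELECT thales_cadp_udf(ssn, 'encrypt', 'nbr') AS ssn_protected FROM customers").show();
    }
}
```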
@@ -0,0 +1,99 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>Thales</groupId>
    <artifactId>Thales-Databricks-UDF</artifactId>
    <version>7.0-SNAPSHOT</version>
    <packaging>jar</packaging>
    <properties>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
    </properties>

    <dependencies>
        <dependency>
            <groupId>io.github.thalescpl-io.cadp</groupId>
            <artifactId>CADP_for_JAVA</artifactId>
            <version>8.16.0.000</version>
        </dependency>
        <dependency>
            <groupId>com.squareup.okhttp3</groupId>
            <artifactId>okhttp</artifactId>
            <version>4.10.0</version>
        </dependency>
        <!-- Java XML Binding (JAXB) API -->
        <dependency>
            <groupId>javax.xml.bind</groupId>
            <artifactId>jaxb-api</artifactId>
            <version>2.3.1</version>
        </dependency>

        <!-- Apache Spark core (provided by the Databricks runtime) -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.12</artifactId>
            <version>3.0.0</version>
            <scope>provided</scope>
        </dependency>

        <!-- Spark SQL (provided by the Databricks runtime) -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.12</artifactId>
            <version>3.0.0</version>
            <scope>provided</scope>
        </dependency>

        <dependency>
            <groupId>org.antlr</groupId>
            <artifactId>antlr4-runtime</artifactId>
            <version>4.9.3</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <!-- Maven Compiler Plugin -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>

            <!-- Assembly plugin: packages the UDF and its dependencies into a single jar -->
            <plugin>
                <artifactId>maven-assembly-plugin</artifactId>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                </configuration>
            </plugin>

            <plugin>
                <groupId>org.antlr</groupId>
                <artifactId>antlr4-maven-plugin</artifactId>
                <version>4.9.3</version>
                <configuration>
                    <!-- configuration options -->
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>
database/databricks/src/main/java/com/example/ThalesDataBricksCADPFPE.java (213 additions, 0 deletions)
@@ -0,0 +1,213 @@
package example;

import javax.crypto.Cipher;
import javax.crypto.spec.IvParameterSpec;

import com.ingrian.security.nae.FPECharset;
import com.ingrian.security.nae.FPEParameterAndFormatSpec;
import com.ingrian.security.nae.IngrianProvider;
import com.ingrian.security.nae.NAEKey;
import com.ingrian.security.nae.NAESession;
import com.ingrian.security.nae.FPEParameterAndFormatSpec.FPEParameterAndFormatBuilder;

import java.io.InputStream;
import java.math.BigInteger;
import java.util.Properties;

public class ThalesDataBricksCADPFPE {
    /*
     * Test application for the logic of a Databricks User-Defined Function (UDF). It is an example of how to use
     * Thales CipherTrust Application Data Protection (CADP) to protect sensitive data in a column. It uses
     * Format Preserving Encryption (FPE) to maintain the original format of the data, so applications or business
     * intelligence tools do not have to change in order to use these columns.
     *
     * Note: this source code is only to be used for testing and proofs of concept; it is not production-ready code.
     * It was not tested for all possible data sizes or combinations of encryption algorithms, IVs, etc. It was
     * tested with CM 2.14 and CADP 8.16. For more information on CADP see:
     * https://thalesdocs.com/ctp/con/cadp/cadp-java/latest/admin/index.html
     *
     * @author mwarner
     */

    private static final IngrianProvider provider;
    private static final Properties properties;

    // Load the UDF configuration (CM user, password, key-access behavior) from the classpath.
    static {
        try (InputStream input = ThalesDataBricksCADPFPE.class.getClassLoader()
                .getResourceAsStream("udfConfig.properties")) {
            if (input == null) {
                throw new RuntimeException("Unable to find udfConfig.properties");
            }
            properties = new Properties();
            properties.load(input);
        } catch (Exception ex) {
            throw new RuntimeException("Error loading properties file", ex);
        }
    }

    // Initialize the CADP provider from CADP_for_JAVA_Public.properties on the classpath.
    static {
        try {
            System.setProperty("com.ingrian.security.nae.CADP_for_JAVA_Properties_Conf_Filename",
                    "CADP_for_JAVA_Public.properties");
            InputStream inputStream = ThalesDataBricksCADPFPE.class.getClassLoader()
                    .getResourceAsStream("CADP_for_JAVA_Public.properties");
            if (inputStream == null) {
                throw new RuntimeException("Failed to find CADP_for_JAVA_Public.properties file.");
            }

            provider = new IngrianProvider.Builder().addConfigFileInputStream(inputStream).build();
        } catch (Exception e) {
            throw new RuntimeException("Failed to initialize IngrianProvider.", e);
        }
    }

    public static void main(String[] args) throws Exception {
        // Sample inputs for a quick local test of the UDF logic.
        // String request_decrypt_char = "thisis a test this is only a test";
        // String request_decrypt_nbr = "8310258662548";
        String request = "554";
        System.out.println("input data = " + request);
        String mode = "encrypt";
        String datatype = "nbr";

        System.out.println("results = " + thales_cadp_udf(request, mode, datatype));
    }

    public static String thales_cadp_udf(String databricks_inputdata, String mode, String datatype) throws Exception {

        if (databricks_inputdata != null && !databricks_inputdata.isEmpty()) {
            // Values too short to format-preserve are returned unchanged.
            if (databricks_inputdata.length() < 2)
                return databricks_inputdata;

            if (!datatype.equalsIgnoreCase("char")) {

                BigInteger lowerBound = BigInteger.valueOf(-9);
                BigInteger upperBound = BigInteger.valueOf(-1);

                try {
                    // Convert the string to an integer.
                    BigInteger number = new BigInteger(databricks_inputdata);

                    // Return single-digit negative numbers (-1 to -9) unchanged.
                    if (number.compareTo(lowerBound) >= 0 && number.compareTo(upperBound) <= 0) {
                        System.out.println("The input is a negative number between -1 and -9.");
                        return databricks_inputdata;
                    }
                } catch (NumberFormatException e) {
                    System.out.println("The input is not a valid number.");
                    return databricks_inputdata;
                }
            }

        } else {
            System.out.println("The input is either null or empty.");
            return databricks_inputdata;
        }

        String keyName = "testfaas";
        String userName = properties.getProperty("CMUSER");
        if (userName == null) {
            throw new IllegalArgumentException("No CMUSER found in udfConfig.properties");
        }
        String password = properties.getProperty("CMPWD");
        if (password == null) {
            throw new IllegalArgumentException("No CMPWD found in udfConfig.properties");
        }
        // yes/no: whether to return the original value when the user has no access to the key.
        String returnciphertextforuserwithnokeyaccess = properties
                .getProperty("returnciphertextforuserwithnokeyaccess");
        boolean returnciphertextbool = "yes".equalsIgnoreCase(returnciphertextforuserwithnokeyaccess);

        NAESession session = null;
        String formattedString = null;

        try {
            session = NAESession.getSession(userName, password.toCharArray());
            NAEKey key = NAEKey.getSecretKey(keyName, session);

            IvParameterSpec ivSpec = null;

            int cipherType = 0;
            String algorithm = "FPE/FF1/CARD62";

            String tweakAlgo = null;
            String tweakData = null;
            FPEParameterAndFormatSpec param = new FPEParameterAndFormatBuilder(tweakData).set_tweakAlgorithm(tweakAlgo)
                    .build();

            if (mode.equals("encrypt"))
                cipherType = Cipher.ENCRYPT_MODE;
            else
                cipherType = Cipher.DECRYPT_MODE;

            if (datatype.equals("char"))
                algorithm = "FPE/FF1/CARD62";
            else {
                algorithm = "FPE/FF1/CARD10";
                // A custom charset can also be defined, for example:
                // FPECharset charset = FPECharset.getUnicodeRangeCharset("31-39");
                // param = new FPEParameterAndFormatBuilder(tweakData).set_tweakAlgorithm(tweakAlgo)
                //         .set_charset(charset).build();
            }

            ivSpec = param;
            Cipher thalesCipher = Cipher.getInstance(algorithm, "IngrianProvider");
            thalesCipher.init(cipherType, key, ivSpec);

            byte[] outbuf;
            try {
                outbuf = thalesCipher.doFinal(databricks_inputdata.getBytes());
                formattedString = new String(outbuf);
            } catch (Exception e) {
                String errormsgkeyaccess = "User is not authorized to perform this operation";

                String errormsg = e.getMessage();
                if (errormsg != null && errormsg.startsWith(errormsgkeyaccess)) {
                    if (returnciphertextbool)
                        formattedString = databricks_inputdata;
                    else
                        formattedString = null;
                }
            }

        } catch (Exception e) {

            System.out.println("in exception with " + e.getMessage());

            if (returnciphertextbool) {
                // Error codes 1401, 1001 and 1002 are treated as key-access problems: return the input unchanged.
                String msg = e.getMessage();
                if (msg != null && (msg.contains("1401") || msg.contains("1001") || msg.contains("1002"))) {
                    if (databricks_inputdata != null) {
                        formattedString = databricks_inputdata;
                    }
                } else {
                    e.printStackTrace(System.out);
                }
            } else {
                e.printStackTrace(System.out);
            }
        } finally {
            if (session != null) {
                session.closeSession();
            }
        }
        return formattedString;
    }
}
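For a quick sanity check of the UDF logic outside Spark, a round trip through the function can be sketched as below. It assumes CipherTrust Manager is reachable and that udfConfig.properties and the "testfaas" key used above are in place; the wrapper class `UdfRoundTripCheck` is hypothetical and not part of this commit.

```java
import example.ThalesDataBricksCADPFPE;

public class UdfRoundTripCheck {
    public static void main(String[] args) throws Exception {
        String plain = "8310258662548";  // sample value from main() above
        String cipher = ThalesDataBricksCADPFPE.thales_cadp_udf(plain, "encrypt", "nbr");
        String back = ThalesDataBricksCADPFPE.thales_cadp_udf(cipher, "decrypt", "nbr");
        // With FPE/FF1/CARD10 the ciphertext keeps the all-digit format and length of the input.
        System.out.println(plain + " -> " + cipher + " -> " + back + " (back should equal plain)");
    }
}
```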