Read TLV encoded data on JavaCard Classic - javacard

I'd like to read TLV encoded certificates on a Java Card (NXP JCOP J3D081, JCOP version 2.4.2, Java Card version 3.0.1 Classic).
The cap file is created successfully, but when I try to install it on the card I get a gpshell error:
load() returns 0x80206A80 (6A80: Wrong data / Incorrect values in command data.)
I use the jars from JCDK 3.0.3 and this usually works with stuff like elliptic curves etc. What could be different with the TLV stuff?
The applet code used (installs fine when not using the BERTLV stuff):
package org.thomas;
import javacard.framework.APDU;
import javacard.framework.Applet;
import javacard.framework.ISO7816;
import javacard.framework.ISOException;
import javacard.framework.Util;
import javacardx.framework.tlv.BERTLV;
import javacardx.framework.tlv.ConstructedBERTLV;
public class TlvApplet extends Applet {

    private ConstructedBERTLV certificate;
    byte[] certificateLength = new byte[2];

    /**
     * TlvApplet constructor
     *
     * @constructor
     */
    private TlvApplet() {
        // Register with the JCRE
        register();
    }

    /**
     * Installs applet
     *
     * @param bArray
     *            the array containing installation parameters
     * @param bOffset
     *            the starting offset in bArray
     * @param bLength
     *            the length in bytes of the parameter data in bArray
     */
    public static void install(byte[] bArray, short bOffset, byte bLength) {
        new TlvApplet();
    }

    public void process(APDU apdu) throws ISOException {
        byte[] buffer = apdu.getBuffer();
        short incomingLength = apdu.setIncomingAndReceive();
        certificate = (ConstructedBERTLV) BERTLV.getInstance(buffer,
                ISO7816.OFFSET_CDATA, incomingLength);
        certificateLength[0] = (byte) (certificate.size() & 0xff);
        certificateLength[1] = (byte) ((certificate.size() >> 8) & 0xff);
        Util.arrayCopyNonAtomic(certificateLength, (short) 0, buffer,
                ISO7816.OFFSET_CDATA, (short) 2);
    }
}
I create the cap file by using the following ant build.xml:
<?xml version="1.0" encoding="UTF-8" ?>
<project default="convert" name="Create JC 3 applet" basedir=".">
    <property environment="env" />

    <!-- Include properties in the build.properties file -->
    <property file="build.properties" />

    <!-- Build specific properties -->
    <property name="source.path" location="${basedir}/${source.folder}" />
    <property name="jcdk.libs" location="${basedir}/${jcdk.basefolder}/${jcdk.subfolder.libs}" />
    <property name="jcdk.apiexports" location="${basedir}/${jcdk.basefolder}/${jcdk.subfolder.apiexportfiles}" />
    <property name="target.path.classes"
              location="${basedir}/${target.basefolder}/${target.subfolder.classes}" />
    <property name="target.path.cap"
              location="${basedir}/${target.basefolder}/${target.subfolder.applet}" />

    <!-- set the classpath for the tasks and the API, include all jar files -->
    <path id="classpath" description="Sets the classpath to Java Card API and tools">
        <fileset dir="${jcdk.libs}">
            <include name="*.jar" />
        </fileset>
    </path>

    <!-- set the export path to the Java Card export files -->
    <path id="export" description="set the export file path">
        <fileset dir="${jcdk.apiexports}">
            <include name="**/*.exp" />
        </fileset>
        <pathelement path="${jcdk.apiexports}" />
        <pathelement path="${target.path.classes}" />
    </path>

    <path id="capexport" description="set the export file for the cap path">
        <fileset dir="${jcdk.apiexports}">
            <include name="**/*.exp" />
        </fileset>
        <pathelement path="${jcdk.apiexports}" />
    </path>

    <!-- Definitions for tasks for Java Card tools -->
    <taskdef name="apdu" classname="com.sun.javacard.ant.tasks.APDUToolTask" classpathref="classpath" />
    <taskdef name="capgen" classname="com.sun.javacard.ant.tasks.CapgenTask" classpathref="classpath" />
    <taskdef name="convert" classname="com.sun.javacard.ant.tasks.ConverterTask" classpathref="classpath" />
    <taskdef name="verifyexport" classname="com.sun.javacard.ant.tasks.VerifyExpTask" classpathref="classpath" />
    <taskdef name="verifycap" classname="com.sun.javacard.ant.tasks.VerifyCapTask" classpathref="classpath" />
    <taskdef name="verifyrevision" classname="com.sun.javacard.ant.tasks.VerifyRevTask" classpathref="classpath" />
    <typedef name="appletnameaid" classname="com.sun.javacard.ant.types.AppletNameAID" classpathref="classpath" />
    <typedef name="jcainputfile" classname="com.sun.javacard.ant.types.JCAInputFile" classpathref="classpath" />
    <typedef name="scriptgen" classname="com.sun.javacard.ant.tasks.ScriptgenTask" classpathref="classpath" />

    <target name="init">
        <mkdir dir="${target.path.classes}" />
    </target>

    <target name="clean">
        <delete dir="${target.path.classes}" />
    </target>

    <target name="compile" depends="init" description="Compile source code to class files">
        <javac debug="${javac.debug}"
               optimize="${javac.optimize}"
               srcdir="${source.path}"
               destdir="${target.path.classes}"
               source="${javac.version.source}"
               target="${javac.version.target}">
            <classpath refid="classpath" />
        </javac>
    </target>

    <target name="convert" depends="compile" description="Convert class files to cap files">
        <convert packagename="${package.name}"
                 packageaid="${package.aid}"
                 majorminorversion="${applet.version}"
                 classdir="${target.path.classes}"
                 outputdirectory="${target.path.cap}"
                 jca="${cap.creation.jca}"
                 exp="${cap.creation.exp}"
                 cap="true"
                 debug="${cap.creation.debug}" verbose="${cap.creation.verbose}"
                 noverify="${cap.creation.noverify}">
            <appletnameaid aid="${applet.aid}" appletname="${applet.name}" />
            <exportpath refid="capexport" />
            <classpath refid="classpath" />
        </convert>
    </target>

    <target name="all" depends="clean, convert" />
</project>
The build.properties used:
# Source folder
source.folder = src/org/thomas
# Java Card Development Kit folders
jcdk.basefolder = lib/javaCardKit303
jcdk.subfolder.apiexportfiles = api_export_files
jcdk.subfolder.libs = lib
# Target folders (will be created, no need to adapt)
target.basefolder = target
target.subfolder.classes = classes
target.subfolder.applet = applet
# Applet properties
package.aid = 0x41:0x41:0x41:0x41:0x41:0x41:0x41:0x41:0x41:0x42
package.name = org.thomas
applet.aid = 0x41:0x41:0x41:0x41:0x41:0x41:0x41:0x41:0x41:0x42:0x42
applet.name = TlvApplet
applet.version = 1.0
# Java class compiler options
javac.debug = no
javac.optimize = no
javac.version.source = 1.5
javac.version.target = 1.5
# CAP file creation options
cap.creation.verbose = false
cap.creation.noverify = false
cap.creation.debug = false
cap.creation.jca = false
cap.creation.exp = false
So I suspected the cap file was compiled against api_connected.jar, but removing that file from the kit didn't change anything.
Any help would be appreciated. Thanks in advance, Thomas

The card simply does not support the TLV classes of the Java Card API. The JC API is just a recommendation, and the card manufacturer may choose to implement only a subset of it.
In general, anything in javacardx is optional (hence the x at the end). The API methods in javacard must be implemented, though even for javacard packages and classes not all functionality may be available at runtime; for instance, cryptographic algorithms may not be present.
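If the goal is just to read the length of the incoming certificate, one way around this is to parse the BER-TLV header by hand, so the CAP file never references the optional javacardx.framework.tlv package at all. Below is a minimal sketch of that idea, not the original applet: the class name ManualTlvApplet is made up, and it assumes a single-byte tag and a definite length of at most two length bytes.
package org.thomas;

import javacard.framework.APDU;
import javacard.framework.Applet;
import javacard.framework.ISO7816;
import javacard.framework.ISOException;
import javacard.framework.Util;

public class ManualTlvApplet extends Applet {

    private ManualTlvApplet() {
        register();
    }

    public static void install(byte[] bArray, short bOffset, byte bLength) {
        new ManualTlvApplet();
    }

    public void process(APDU apdu) throws ISOException {
        if (selectingApplet()) {
            return; // nothing to do on SELECT
        }
        byte[] buffer = apdu.getBuffer();
        short incomingLength = apdu.setIncomingAndReceive();
        if (incomingLength < 2) {
            ISOException.throwIt(ISO7816.SW_WRONG_LENGTH);
        }
        // Skip the tag byte (single-byte tag assumed for this sketch).
        short lengthOffset = (short) (ISO7816.OFFSET_CDATA + 1);
        short valueLength = readBerLength(buffer, lengthOffset);
        // Return the decoded value length, big endian, as the response.
        Util.setShort(buffer, (short) 0, valueLength);
        apdu.setOutgoingAndSend((short) 0, (short) 2);
    }

    // Decodes a definite BER length: short form, 0x81 or 0x82.
    private static short readBerLength(byte[] buf, short off) {
        byte first = buf[off];
        if ((first & 0x80) == 0) {
            return (short) (first & 0x7F);
        }
        if (first == (byte) 0x81) {
            return (short) (buf[(short) (off + 1)] & 0xFF);
        }
        if (first == (byte) 0x82) {
            return Util.getShort(buf, (short) (off + 1));
        }
        ISOException.throwIt(ISO7816.SW_DATA_INVALID);
        return 0; // never reached
    }
}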

Related

Azure app service diagnostic blob not logging nlog based logs

I want to write NLog-generated application logs to the App Service diagnostic blob (i.e. Application Logging (Blob)), but only the default logs are printed, not the NLog-based custom logs. Application Logging (Filesystem) does work when a file target is added to nlog.config; the problem is only with the blob.
nlog.config file:
<?xml version="1.0" encoding="utf-8" ?>
<nlog xmlns="http://www.nlog-project.org/schemas/NLog.xsd"
      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
      autoReload="true"
      throwConfigExceptions="true"
      internalLogLevel="info"
      internalLogFile="d:\home\LogFiles\temp\internal-nlog-AspNetCore3.txt">

  <!-- enable asp.net core layout renderers -->
  <extensions>
    <add assembly="NLog.Web.AspNetCore"/>
  </extensions>

  <!-- the targets to write to -->
  <targets>
    <target xsi:type="Trace" name="String" layout="${level}\: ${logger}[0]${newline} |trace| ${message}${exception:format=tostring}" />
    <target xsi:type="Console" name="lifetimeConsole" layout="${level}\: ${logger}[0]${newline} |console| ${message}${exception:format=tostring}" />
  </targets>

  <rules>
    <logger name="*" minlevel="Trace" writeTo="lifetimeConsole,String" final="true"/>
  </rules>
</nlog>
program.cs file
namespace testapp
{
    public class Program
    {
        public static void Main(string[] args)
        {
            var logger = NLog.Web.NLogBuilder.ConfigureNLog("nlog.config").GetCurrentClassLogger();
            try
            {
                logger.Debug("init main");
                CreateHostBuilder(args).Build().Run();
            }
            catch (Exception exception)
            {
                logger.Error(exception, "Stopped program because of exception");
                throw;
            }
            finally
            {
                NLog.LogManager.Shutdown();
            }
        }

        public static IHostBuilder CreateHostBuilder(string[] args) =>
            Host.CreateDefaultBuilder(args)
                .ConfigureLogging(logging =>
                {
                    logging.SetMinimumLevel(Microsoft.Extensions.Logging.LogLevel.Trace);
                    logging.AddConsole();
                    logging.AddDebug();
                    logging.AddAzureWebAppDiagnostics();
                })
                .UseNLog() // NLog: Setup NLog for Dependency injection
                .ConfigureServices(serviceCollection => serviceCollection
                    .Configure<AzureBlobLoggerOptions>(options =>
                    {
                        options.BlobName = "testlog.txt";
                    }))
                .ConfigureWebHostDefaults(webBuilder =>
                {
                    webBuilder.UseStartup<Startup>();
                });
    }
}
The NLog-based logs are not written to the App Service diagnostic blob; only the default logging is printed.
Kindly help to resolve this issue.
It seems that the System.Diagnostics.Trace target works best for ASP.NET applications and not for ASP.NET Core applications in Azure.
AddAzureWebAppDiagnostics redirects all output written to the Microsoft ILogger to FileSystem or Blob, but any output written to pure NLog Logger objects will not be redirected.
Maybe the solution is to set up an NLog FileTarget writing to the HOME directory:
<nlog>
  <targets async="true">
    <!-- Environment Variable %HOME% matches D:/Home -->
    <target type="file" name="appFile" fileName="${environment:HOME:cached=true}/logfiles/application/app-${shortdate}-${processid}.txt" />
  </targets>
  <rules>
    <logger name="*" minLevel="Debug" writeTo="appFile" />
  </rules>
</nlog>
See also: https://learn.microsoft.com/en-us/azure/app-service/troubleshoot-diagnostic-logs#stream-logs
For including Blob output, take a look at NLog.Extensions.AzureBlobStorage.
An alternative to Blob output could be Application Insights, but it might have different pricing.

Test NG Cucumber Parallel Automation with Dynamic test tags and param

I am currently looking to run multiple Cucumber tests in parallel using TestNG, and I successfully managed to do that.
Now my requirement is, rather than having multiple test tags in the TestNG file with different parameters, to take them from the Maven command line, so I can run the automation without editing the testng.xml file. Is there a way to achieve this? Please find my current testng.xml configuration below.
testng.xml
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd">
<suite name="Testng Cucumber Suite" thread-count="2" parallel="tests">
  <!-- In order to run test cases please copy one test and make sure you add the relevant parameters -->
  <test name="Run_Nexus_06">
    <parameter name="deviceName" value="Google Nexus 6" />
    <parameter name="platformVersion" value="6.0" />
    <classes>
      <class name="cucumber.mobile.ParallelRunner">
      </class>
    </classes>
  </test>
  <test name="Run_Google_Pixel">
    <parameter name="deviceName" value="Google Pixel" />
    <parameter name="platformVersion" value="7.1" />
    <classes>
      <class name="cucumber.mobile.ParallelRunner">
      </class>
    </classes>
  </test>
</suite>
Runner Class :
@CucumberOptions(plugin = {"pretty", "html:target/html/", "json:target/cucumber.json", "junit:TEST-all.xml"},
        features = "src/test/resources/features/SignUp.feature", glue = {"steps"}, tags = {"@Mobile"})
public class ParallelRunner extends Hook {

    List<Object[]> data;

    // <parameter name="deviceName" value="Google Pixel" />
    // <parameter name="platformVersion" value="7.1" />
    @BeforeTest
    @Parameters({"deviceName", "platformVersion"})
    public void bb(String deviceName, String platformVersion) {
        Device device = new Device();
        device.setDeviceName(deviceName);
        device.setOsVersion(platformVersion);
        DeviceFactory.setDevice(device);
        System.out.println("Device" + deviceName + "Os Version" + platformVersion + " " + Thread.currentThread().getId());
    }
}
I was glad that I found this post. It is very useful if anyone is trying to achieve the same thing: Dynamic TestNG IAlterSuiteListener.
Maven command: mvn compile test -DdeviceFlavors="Google Nexus 6","Google Pixel" -DdeviceOsFlavors="6.0","7.1" -Dsurefire.suiteXmlFiles=testng.xml
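For reference, here is a minimal sketch of what such an IAlterSuiteListener could look like, building the test entries from the comma-separated -DdeviceFlavors and -DdeviceOsFlavors properties used in the Maven command above. The class name DynamicDeviceSuiteListener is made up, and the listener still has to be registered, e.g. via a listeners element in testng.xml.
package cucumber.mobile;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.testng.IAlterSuiteListener;
import org.testng.xml.XmlClass;
import org.testng.xml.XmlSuite;
import org.testng.xml.XmlTest;

public class DynamicDeviceSuiteListener implements IAlterSuiteListener {

    @Override
    public void alter(List<XmlSuite> suites) {
        // e.g. -DdeviceFlavors="Google Nexus 6","Google Pixel" -DdeviceOsFlavors="6.0","7.1"
        String[] devices = System.getProperty("deviceFlavors", "").split(",");
        String[] versions = System.getProperty("deviceOsFlavors", "").split(",");

        XmlSuite suite = suites.get(0);
        suite.setParallel(XmlSuite.ParallelMode.TESTS);
        suite.setThreadCount(devices.length);
        suite.getTests().clear(); // drop the hand-written <test> blocks

        for (int i = 0; i < devices.length; i++) {
            XmlTest test = new XmlTest(suite); // adds itself to the suite
            test.setName("Run_" + devices[i].trim().replace(' ', '_'));

            Map<String, String> parameters = new HashMap<String, String>();
            parameters.put("deviceName", devices[i].trim());
            parameters.put("platformVersion", versions[i].trim());
            test.setParameters(parameters);

            test.setXmlClasses(Collections.singletonList(new XmlClass("cucumber.mobile.ParallelRunner")));
        }
    }
}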

NLog default-target-parameters from Xml configuration are not applied to programmatically added file targets

I'm working on a .NET Core 2.0 console application. NLog version 4.5.7.
Here is my config file:
<?xml version="1.0" encoding="utf-8" ?>
<nlog xmlns="http://www.nlog-project.org/schemas/NLog.xsd"
      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
      throwExceptions="false"
      internalLogFile="Logs/Internal/log.txt"
      internalLogLevel="Warn"
      internalLogToConsole="false"
      internalLogToConsoleError="false"
      internalLogToTrace="false"
      internalLogIncludeTimestamp="true">

  <targets>
    <default-wrapper
        xsi:type="AsyncWrapper"
        timeToSleepBetweenBatches="0"
        overflowAction="Block"/>
    <default-target-parameters
        xsi:type="File"
        layout="${longdate}|${level:uppercase=true}|${message}${onexception:${newline}}${exception:format=tostring}"
        archiveAboveSize="10485760"
        maxArchiveFiles="10"
        archiveNumbering="DateAndSequence"
        archiveOldFileOnStartup="true"
        keepFileOpen="true"
        cleanupFileName="false"/>
    <target
        name="driver"
        xsi:type="File"
        archiveFileName="Logs/Driver/Archives/log.{#####}.txt"
        fileName="Logs/Driver/log.txt"/>
  </targets>

  <rules>
    <logger
        name="driver"
        minlevel="Trace"
        writeTo="driver"/>
  </rules>
</nlog>
I'm setting this config on startup of my application:
LogManager.Configuration = new XmlLoggingConfiguration(<path>, false);
And then I'm adding file targets to this configuration programmatically:
string
    deviceTag = string.Concat("Device_", _id.ToString()),
    loggerName = string.Concat("logger_", deviceTag);

LogManager.Configuration.AddRuleForAllLevels(
    new FileTarget(string.Concat("target_", deviceTag))
    {
        FileName = string.Concat("Logs/", deviceTag, "/log.txt"),
        ArchiveFileName = string.Concat("Logs/", deviceTag, "/Archives/log.{#####}.txt")
    }, loggerName);

_logger = LogManager.GetLogger(loggerName);
LogManager.ReconfigExistingLoggers();
The target is working and logs are written, but the default-target-parameters from the XML config are ignored.
Is this a bug or a feature?
Or am I doing something wrong?

Set log file limit with log4net and azure file appender

I'm currently using log4net and Azure Files to store my logs, which works ace.
I've been searching and can't find any configuration to make the logger create files no bigger than a given size in KB.
This is the configuration I have:
<rollingStyle value="Size" />
<MaxSizeRollBackups value="10" />
<MaximumFileSize value="10KB" />
<AzureStorageConnectionString value="connectiondatahere" />
<ShareName value="filelog" />
<Path value="processor" />
<File value="processor_{yyyy-MM-dd}.txt" />
<layout type="log4net.Layout.PatternLayout">
<ConversionPattern value="%date %-5level %logger %message%newline"/>
</layout>
</appender>
<root>
<level value="ALL" />
<appender-ref ref="AzureFileAppender"/>
</root>
I've tried a few variations of this configuration but no luck.
After reviewing the source code of log4net-appender-azurefilestorage, I found that a log file size limit is currently not supported in the Azure file appender. I suggest you rewrite the Azure file appender yourself and add the size limit feature.
Below are the steps to do it.
Step 1: Add a property named MaximumFileSize to the AzureFileAppender class.
public int MaximumFileSize { get; set; }
Step 2: Add the size limit check when appending a log entry to the file.
protected override void Append(LoggingEvent loggingEvent)
{
    Initialise(loggingEvent);
    var buffer = Encoding.UTF8.GetBytes(RenderLoggingEvent(loggingEvent));
    if ((_file.Properties.Length + buffer.Length) > MaximumFileSize)
    {
        // do something if the file reaches the maximum file size
    }
    else
    {
        _file.Resize(_file.Properties.Length + buffer.Length);
        using (var fileStream = _file.OpenWrite(null))
        {
            fileStream.Seek(buffer.Length * -1, SeekOrigin.End);
            fileStream.Write(buffer, 0, buffer.Length);
        }
    }
}
Step 3: After that, you can add the size limit (in bytes) to the configuration file.
<MaximumFileSize value="10240" />

logstash: in log4j-input, the "path" is not correct

In my config file, I use
input { log4j {} }
and:
output { stdout { codec => rubydebug } }
I've attached my log4j to logstash using SocketListener. When my app prints something to the log, I see in logstash:
{
    "message" => "<the message>",
    "@version" => "1",
    "@timestamp" => "2015-06-05T20:28:23.312Z",
    "type" => "log4j",
    "host" => "127.0.0.1:52083",
    "path" => "com.ohadr.logs_provider.MyServlet",
    "priority" => "INFO",
    "logger_name" => "com.ohadr.logs_provider.MyServlet",
    "thread" => "http-apr-8080-exec-3",
    "class" => "?",
    "file" => "?:?",
    "method" => "?",
}
The issue is that the "path" field is wrong: as far as I understand, it should be the path of the log file; instead, I get the same value as "logger_name".
I have several apps on my Tomcat that I want to collect the logs from. I need "path" to be the path of the file (including the file name), so I can distinguish between logs from different apps (each app logs to a different file).
How can it be done?
Thanks!
The log4j input is a listener on a TCP socket. There is no file path.
To solve your challenge, you can either configure multiple TCP ports, so that every application logs to a different TCP port, or you could use GELF. GELF is a UDP-based protocol, but you need additional jars. Logstash also supports GELF as a native input. Many GELF appenders let you specify static fields, so you can distinguish at the application level which application is currently logging.
You can find an example here:
<appender name="gelf" class="biz.paluch.logging.gelf.log4j.GelfLogAppender">
    <param name="Threshold" value="INFO" />
    <param name="Host" value="udp:localhost" />
    <param name="Port" value="12201" />
    <param name="Version" value="1.1" />
    <param name="Facility" value="java-test" />
    <param name="ExtractStackTrace" value="true" />
    <param name="FilterStackTrace" value="true" />
    <param name="MdcProfiling" value="true" />
    <param name="TimestampPattern" value="yyyy-MM-dd HH:mm:ss,SSSS" />
    <param name="MaximumMessageSize" value="8192" />
    <!-- These are static fields -->
    <param name="AdditionalFields" value="fieldName1=fieldValue1,fieldName2=fieldValue2" />
    <!-- These are fields using MDC -->
    <param name="MdcFields" value="mdcField1,mdcField2" />
    <param name="DynamicMdcFields" value="mdc.*,(mdc|MDC)fields" />
    <param name="IncludeFullMdc" value="true" />
</appender>
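To make use of the MdcFields above, each application has to put the corresponding values into the log4j MDC before logging. Here is a small, hypothetical example; the key names simply mirror the mdcField1/mdcField2 placeholders from the configuration, and the values are made up.
import org.apache.log4j.Logger;
import org.apache.log4j.MDC;

public class GelfMdcExample {

    private static final Logger LOG = Logger.getLogger(GelfMdcExample.class);

    public static void main(String[] args) {
        // Keys must match the MdcFields / DynamicMdcFields configuration above.
        MDC.put("mdcField1", "logs_provider");      // e.g. the application name
        MDC.put("mdcField2", "tomcat-instance-1");  // e.g. the instance
        LOG.info("application started");
        MDC.remove("mdcField1");
        MDC.remove("mdcField2");
    }
}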
HTH, Mark
