Nifi: how to convert multiple log file in one file? - groovy

I have many internal log , which i write myself inside nifi environment, i want to input all this data inside one log flowFile, but this code trows Null pointer exception what should i change?
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.apache.nifi.processor.FlowFileFilter;
import groovy.json.JsonSlurper;
import groovy.json.JsonBuilder;
def flowFile = session.get();
def n=0;
if(!flowFile)return
def size = flowFile.getAttribute('fileSize');
int value = size as Integer;
log.error("g");
if((value/338)>1){
def ffList = session.get(new FlowFileFilter(){
public FlowFileFilterResult filter(FlowFile ff) {
if( size == ff.getAttribute('fileSize') ){ n++; return FlowFileFilterResult.ACCEPT_AND_CONTINUE;
}
else{
return FlowFileFilterResult.REJECT_AND_CONTINUE
}
}
})
session.transfer(ffList[n-1],REL_SUCCESS);
session.remove( ffList[0..-2] )
session.remove(flowFile);
}
else{
session.transfer(flowFile,REL_SUCCESS);
}

Related

Getting Empty Array Using Groovy Script in Nifi

I have an requirement where I need to parse the data into the required format
Input:
{
"Message" : "\nRecord 1:\nRequired data is missing. \n\nRecord 2:\nprocessing failed\n"
}
Here the content and delimiters are not fixed. The fixed part is only /nRecord keyword on which I am writing the Script. But I am not getting desired Output using Groovy.
desired Output:
[
{
"Record 1": "nRequired data is missing"
},
{
"Record 2": "processing failed"
}
]
I have written Groovy Script for the same but I am getting empty array.
import org.apache.commons.io.IOUtils
import groovy.json.*
import java.util.ArrayList
import java.nio.charset.*
import java.nio.charset.StandardCharsets
import groovy.json.JsonSlurper
import groovy.json.JsonBuilder
def flowFile = session.get()
if(!flowFile) return
try {
flowFile = session.write(flowFile,
{ inputStream, outputStream ->
def text = IOUtils.toString(inputStream, StandardCharsets.UTF_8)
splitted = text.split('\nRecord')
int j = splitted.size()
final1 = []
for (int i=0;i<j-1;i++)
{
k = "Record " + splitted[i+1]
valid = k.replaceAll("\\n|\"|\\n|}","")
final1.add("{\"" + valid.replaceFirst(":",'":"')+ "\"}" )
}
def json = JsonOutput.toJson(final1)
outputStream.write(JsonOutput.prettyPrint(json).getBytes(StandardCharsets.UTF_8))
} as StreamCallback)
session.transfer(flowFile, REL_SUCCESS)
} catch(Exception e) {
log.error('Error during JSON operations', e)
flowFile = session.putAttribute(flowFile, "error", e.getMessage())
session.transfer(flowFile, REL_FAILURE)
}
Can you please help me with the same.
Thank you.
I would use a regex with a simple trick:
import groovy.json.*
def json = new JsonSlurper().parseText '{ "Message" : "\nRecord 1:\nRequired data is missing. \n\nRecord 2:\nprocessing failed\nRecord 3:\nprocessing failed badly\n" }'
String msg = json.Message.replaceAll( /\n+(Record \d+:)/, '=$1' ) // THE trick!
List res = ( msg =~ /(?m)(Record \d+):([^=]+)*/ ).collect{ _, rec, text -> [ (rec):text.trim() ] }
assert JsonOutput.toJson( res ) == '[{"Record 1":"Required data is missing."},{"Record 2":"processing failed"},{"Record 3":"processing failed badly"}]'

How to move a component in Sonatype Nexus 3?

I'm trying to promote a component together with all it's assets from our staging repo to the released repo. So far I've managed to create the component so that it get's detected when trying to recreate it but it won't show up in searches or browsing. I'm pretty sure I need to emit an event for it to show up but even after rebuilding the indexes it won't show up so I must have missed something else as well.
import org.sonatype.nexus.repository.storage.Asset
import org.sonatype.nexus.repository.storage.Query
import org.sonatype.nexus.repository.storage.Component
import org.sonatype.nexus.repository.storage.StorageFacet
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
def request = new JsonSlurper().parseText(args)
assert request.fromRepoName: 'fromRepo parameter is required'
assert request.toRepoName: 'toRepo parameter is required'
assert request.artifactId: 'artifactId parameter is required'
assert request.groupId: 'groupId parameter is required'
assert request.version: 'version parameter is required'
log.info("Moving ${request.groupId}:${request.artifactId}:${request.version} from ${request.fromRepoName} to ${request.toRepoName}")
def fromRepo = repository.repositoryManager.get(request.fromRepoName)
def toRepo = repository.repositoryManager.get(request.toRepoName)
StorageFacet fromStorageFacet = fromRepo.facet(StorageFacet)
StorageFacet toStorageFacet = toRepo.facet(StorageFacet)
def fromTx = fromStorageFacet.txSupplier().get()
def toTx = toStorageFacet.txSupplier().get()
try {
fromTx.begin()
log.info("Transaction started for repo: ${request.fromRepoName}")
Iterable<Component> components = fromTx.
findComponents((Query.builder().where('name == ').param(request.artifactId).and('group == ').param(request.groupId) & 'version == ').param(request.version).build(), [fromRepo])
log.info("Trying to get component.")
component = components.iterator().next()
try {
toTx.begin()
log.info("Transaction started for repo: ${request.toRepoName}")
if (toTx.componentExists(component.group(), component.name(), component.version(), toRepo)) {
log.info("Component already exists, deleting.")
Iterable<Component> componentsToDelete = toTx.
findComponents((Query.builder().where('name == ').param(request.artifactId).and('group == ').param(request.groupId) & 'version == ').param(request.version).build(), [toRepo])
log.info("Trying to get component.")
componentToDelete = componentsToDelete.iterator().next()
toTx.deleteComponent(componentToDelete)
}
log.info("Creating new component with name: ${component.name()}, group: ${component.group()}, version: ${component.version()} in repo ${request.toRepoName}")
newComponent = toTx.createComponent(toTx.findBucket(toRepo), toRepo.getFormat())
.name(component.name())
.group(component.group())
.version(component.version())
.attributes(component.attributes())
newComponent.setEntityMetadata(component.getEntityMetadata())
log.info("Component metadata: ${component.getEntityMetadata()}")
log.info("New component successfully created.")
fromTx.browseAssets(component).each {Asset asset ->
log.info("Attaching ${asset.name()} to new component.")
log.info("Asset metadata: ${asset}")
newAsset = toTx.createAsset(toTx.findBucket(toRepo), newComponent)
.name(asset.name())
.attributes(asset.attributes())
newAsset.setEntityMetadata(asset.getEntityMetadata())
log.info("Asset entityMetadata: ${asset.getEntityMetadata()}")
log.info("New asset metadata: ${newAsset}")
log.info("Trying to save asset.")
toTx.saveAsset(newAsset)
}
toTx.saveComponent(newComponent)
toTx.commit()
log.info("Transaction commited for repo: ${request.toRepoName}")
} finally {
toTx.close()
}
log.info("Deleting component with name: ${component.name()}, group: ${component.group()}, version: ${component.version()} in repo ${request.fromRepoName}")
fromTx.deleteComponent(component)
fromTx.commit()
log.info("Transaction commited for repo: ${request.fromRepoName}")
}
finally {
fromTx.close()
}
def result = JsonOutput.toJson([
status: "Success"
])
return result
For added context I'm trying to setup a CI/CD flow with Jenkins Pipelines & Maven based on this: https://www.cloudbees.com/blog/new-way-do-continuous-delivery-maven-and-jenkins-pipeline
Not sure if you solved this already but seems like I am trying to achieve a very similar solution.
I've found that if I add the assets into the destination repository using the MavenFacet or RawContentFacet (depending on the underlying repository format) and using the put method then the uploaded assets do show up in browse and search.
This is the code I have come up with:
import org.sonatype.nexus.repository.storage.StorageFacet
import org.sonatype.nexus.repository.transaction.TransactionalStoreBlob
import org.sonatype.nexus.repository.view.payloads.BlobPayload
import org.sonatype.nexus.repository.storage.Component
import org.sonatype.nexus.repository.storage.Query
import org.sonatype.nexus.repository.maven.MavenFacet
import org.sonatype.nexus.repository.maven.internal.Maven2Format
import org.sonatype.nexus.repository.raw.RawContentFacet
import org.sonatype.nexus.repository.raw.internal.RawFormat
import org.sonatype.nexus.repository.storage.Asset
import org.sonatype.nexus.blobstore.api.BlobStoreManager
import org.sonatype.nexus.repository.manager.RepositoryManager
RepositoryManager rManager = repository.repositoryManager
BlobStoreManager bManager = blobStore.blobStoreManager
def STAGING = 'staging'
def input = new groovy.json.JsonSlurper().parseText(args)
for (item in input.items) {
assert item.to: 'item.to parameter is required'
assert item.group: 'item.group parameter is required'
assert item.artifact: 'item.artifact parameter is required'
assert item.version: 'item.version parameter is required'
assert item.to != STAGING: 'item.to cannot be the staging area'
// Check the repository exists
def toRepo = repository.repositoryManager.get(item.to)
assert toRepo: 'item.to is not a valid repository name'
// Currently only support these payload types
assert [Maven2Format.NAME, RawFormat.NAME].contains(toRepo.getFormat().toString()): 'Unknown Format ' + toRepo.getFormat().toString()
}
def repo = rManager.get(STAGING)
def tx = repo.facet(StorageFacet).txSupplier().get()
def deNest = new ArrayList()
try
{
tx.begin()
for (item in input.items) {
Iterable<Component> components = tx.findComponents(
Query.builder().where('name == ').param(item.artifact).and('group == ').param(item.group)
.and('version == ').param(item.version).build(), [repo])
Component component = components.iterator().next();
tx.browseAssets(component).each { Asset asset ->
def br = asset.requireBlobRef()
deNest.add([
to: item.to,
store: br.getStore(),
blobId: br.getBlobId(),
asset: asset.name(),
contentType: asset.contentType()
])
}
}
tx.commit()
}
finally
{
tx.close()
}
for (dn in deNest) {
def toRepo = rManager.get(dn.to)
def payload = new BlobPayload(bManager.get(dn.store).get(dn.blobId), dn.contentType)
TransactionalStoreBlob.operation.withDb(toRepo.facet(StorageFacet).txSupplier()).call {
switch (toRepo.getFormat().toString()) {
case Maven2Format.NAME:
def toTx = toRepo.facet(MavenFacet)
toTx.put(toTx.mavenPathParser.parsePath(dn.asset), payload)
break
case RawFormat.NAME:
def toTx = toRepo.facet(RawContentFacet)
toTx.put(dn.asset, payload)
break
default:
throw new RuntimeException('Unknown Format ' + toRepo.getFormat().toString())
}
}
}
return groovy.json.JsonOutput.toJson([success: true])
The script below works for NPM repositories within a single blob. Haven't tried it for other cases. Nexus 3.43.0-01
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import org.joda.time.DateTime
import org.sonatype.nexus.repository.Format
import org.sonatype.nexus.repository.manager.RepositoryManager
import org.sonatype.nexus.repository.storage.Asset
import org.sonatype.nexus.repository.storage.Component
import org.sonatype.nexus.repository.storage.Query
import org.sonatype.nexus.repository.storage.StorageFacet
def request = new JsonSlurper().parseText(args);
assert request.srcRepoName: 'srcRepoName parameter is required';
assert request.dstRepoName: 'dstRepoName parameter is required';
def resultCode = 0;
def errMessage = '';
log.info('Try to sync assets from ' + request.srcRepoName + ' to ' + request.dstRepoName);
RepositoryManager repositoryManager = repository.repositoryManager;
def srcRepo = repositoryManager.get(request.srcRepoName);
StorageFacet srcStorageFacet = srcRepo.facet(StorageFacet);
def srcTx = srcStorageFacet.txSupplier().get();
class SrcFormat extends Format {
SrcFormat(String format) {
super(format);
}
};
try {
srcTx.begin();
log.info('syncRepos. Try to find source components');
Iterable<Component> srcComponents = srcTx.findComponents(Query.builder().where('1').eq(1).build(), [srcRepo]);
log.info('syncRepos. There are ' + srcComponents.size() + ' source components found');
srcTx.commit();
if (srcComponents != null && srcComponents.size() > 0) {
log.info('syncRepos. Try to create destination components and assets');
def dstRepo = repositoryManager.get(request.dstRepoName);
srcComponents.each {srcComponent ->
srcTx.begin();
Component dstComponent = null;
if (srcTx.componentExists(srcComponent.group(), srcComponent.name(), srcComponent.version(), dstRepo)) {
dstComponent = srcTx.findComponents(Query.builder().where('name').eq(srcComponent.name()).build(), [srcRepo])[0];
} else {
dstComponent = srcTx.createComponent(srcTx.findBucket(dstRepo), new SrcFormat(srcComponent.format()))
.name(srcComponent.name())
.group(srcComponent.group())
.version(srcComponent.version())
.attributes(srcComponent.attributes());
dstComponent.format(srcComponent.format());
srcTx.saveComponent(dstComponent);
};
srcTx.browseAssets(srcComponent).each { srcAsset ->
if (!srcTx.assetExists(srcAsset.name(), dstRepo)) {
def newAsset = srcTx.createAsset(srcTx.findBucket(dstRepo), dstComponent)
.name(srcAsset.name())
.attributes(srcAsset.attributes())
.blobRef(srcAsset.requireBlobRef());
newAsset.size(srcAsset.size());
newAsset.contentType(srcAsset.contentType());
newAsset.lastDownloaded(DateTime.now());
srcTx.saveAsset(newAsset);
};
};
srcTx.commit();
};
srcTx.begin();
log.info('syncRepos. Try to find source assets by application/json contentType');
Iterable<Asset> srcAssets = srcTx.findAssets(Query.builder().where('content_type').eq('application/json').build(), [srcRepo]);
log.info('syncRepos. There are ' + srcAssets.size() + ' source assets found');
srcAssets.each { srcAsset ->
if (!srcTx.assetExists(srcAsset.name(), dstRepo)) {
def newAsset = srcTx.createAsset(srcTx.findBucket(dstRepo), new SrcFormat(srcAsset.format()))
.name(srcAsset.name())
.attributes(srcAsset.attributes())
.blobRef(srcAsset.requireBlobRef());
newAsset.size(srcAsset.size());
newAsset.contentType(srcAsset.contentType());
newAsset.lastDownloaded(DateTime.now());
srcTx.saveAsset(newAsset);
};
};
srcTx.commit();
};
} catch (Exception e) {
log.error('syncRepos. Exception when sync repos: ' + e.message);
srcTx.rollback();
resultCode = -1;
errMessage = e.message;
} finally {
srcTx.close();
};
log.info('syncRepos. Try to return result');
def result = JsonOutput.toJson([
srcRepoName: request.srcRepoName,
dstRepoName: request.dstRepoName,
resultCode: resultCode,
errMessage: errMessage
]);
return result;

How to remove negative index error in groovy code?

I have code logic inside nifi processor (executeScript processor) which will reduce log files(in this case in my log files i have same text so i want to remove duplicates and i try to choose them by name and file size),but i sometimes ( not always) got negative index error, what should i change in my code to make it work?
import org.apache.nifi.processor.FlowFileFilter;
import groovy.json.JsonSlurper
import groovy.json.JsonBuilder
import java.nio.charset.StandardCharsets
import org.apache.commons.io.IOUtils
def flowFile = session.get();
def n=0;
if(!flowFile)return
def size = flowFile.getAttribute('Size');
int value = size as Integer;
if((value/338)>=1){
def ffList = session.get(new FlowFileFilter(){
public FlowFileFilterResult filter(FlowFile ff) {
if( size == ff.getAttribute('Size') ){ n++; return FlowFileFilterResult.ACCEPT_AND_CONTINUE;}
else{ return FlowFileFilterResult.REJECT_AND_CONTINUE; }
}
});
session.transfer(ffList[n-1],REL_SUCCESS);
session.remove( ffList[0..-2] )
session.remove(flowFile);
}
else{
session.transfer(flowFile,REL_SUCCESS);
}
I think you should check the size of list before accessing elements.
def ffSize = ffList.size()
if (n <= ffSize && ffSize >= 2) {
session.transfer(ffList[n - 1], REL_SUCCESS)
session.remove(ffList[0..-2])
session.remove(flowFile)
}

Nifi: how to transfer only last file from the flowFile list?

I want to get all flowfile from the queu which fileSize is greater than 831 and then put them into the list after that catch last flowfile from the list and transfer to success relationship and finally remove all other flowfiles , here is my code which throws exception that transfer relationship not specified , what should i change in this case?
import org.apache.nifi.processor.FlowFileFilter;
import groovy.json.JsonSlurper
import groovy.json.JsonBuilder
import java.nio.charset.StandardCharsets
import org.apache.commons.io.IOUtils
def flowFile = session.get()
def n=0;
if(!flowFile)return
def size = flowFile.getAttribute('fileSize');
log.error(size.toString())
int value = size as Integer;
if((value/831)>1){
def ffList = session.get(new FlowFileFilter(){
public FlowFileFilterResult filter(FlowFile ff) {
if( size == ff.getAttribute('fileSize') ) n++; return FlowFileFilterResult.ACCEPT_AND_CONTINUE
return FlowFileFilterResult.REJECT_AND_CONTINUE
}
})
session.transfer(ffList.get(n-1),REL_SUCCESS)
//session.remove(ffList);
}
session.remove(flowFile);
if you get flow file from queue you have to do something with it.
this code returns you a flowFile list:
def ffList = session.get(new FlowFileFilter(){...})
if you wand just remove all of them except the last one just put this code after transferring the last one:
session.remove( ffList[0..-2] )
and i guess there is a mistake in this line:
if( size == ff.getAttribute('fileSize') ) n++; return FlowFileFilterResult.ACCEPT_AND_CONTINUE
the command return FlowFileFilterResult.ACCEPT_AND_CONTINUE executed in any case because it's not under if.
i think should be like this:
if( size == ff.getAttribute('fileSize') ){
n++;
return FlowFileFilterResult.ACCEPT_AND_CONTINUE
}

Soapui unexpected end of file

I'm working on a project with Soapui when I send the request I have this error unexpected end of file here is my code:
import java.io.FileNotFoundException
import java.io.FileOutputStream
import java.io.IOException
import java.util.Date
import java.io.*
import org.apache.poi.hssf.usermodel.HSSFCell
import org.apache.poi.hssf.usermodel.HSSFCellStyle
import org.apache.poi.hssf.usermodel.HSSFDataFormat
import org.apache.poi.hssf.usermodel.HSSFRow
import org.apache.poi.hssf.usermodel.HSSFSheet
import org.apache.poi.hssf.usermodel.HSSFWorkbook
import org.apache.poi.hssf.util.HSSFColor
import org.apache.poi.xssf.usermodel.*
import com.opencsv.CSVReader
import com.opencsv.CSVWriter
CSVReader reader = new CSVReader(new FileReader("C:\\Users\\******\\Desktop\\Groovy Script test\\data.csv"))
CSVWriter writer = new CSVWriter(new FileWriter("C:\\Users\\******\\Desktop\\Groovy Script test\\Output.csv"))
//get property
propTestStep = context.testCase.getTestStepByName("PRO-Number-property") // get the Property step (ours is named "property-loop")
//loop
String [] nextLine
while ((nextLine = reader.readNext()) != null ) {
writer.writeNext(nextLine)
log.info ("The value on the excel sheet is data " + nextLine[5])
//assign value of pro
propTestStep.setPropertyValue("PRO Number", (nextLine[2])) //set the value of Pro" property equal to Excel's column B ( getCell(1) )
//Get node of the response
testRunner.runTestStepByName("GetByProNumber") //we're going to run the Soap Request after each iteration of the Excel's rows.
def groovyUtils = new com.eviware.soapui.support.GroovyUtils( context )
if( "GetByProNumber#Response" == null){
log.info("Null try again")
} else {
def holder = groovyUtils.getXmlHolder("GetByProNumber#Response")
log.info (holder)
holder.namespaces["ns"] = "http://www.SaiaSecure.com/WebService/Shipment"
if (holder != null){
writer.writeNext("${holder.getNodeValue('//ns:Activity')}", nextLine[0])
log.info("Node Response:${holder.getNodeValue('//ns:Activity')}")
} else {
log.info("Null try again")
}
}
}// end of while loop
//writer.close()
writer.close()
log.info ("Done" )
Please I want to know what I'm doing wrong Thanks for any help I'm working on a project with Soapui when I send the request I have this error unexpected end of file here is my code:
Oh ok, your comment is very useful to identify the issue.
That appears to be a trivial error.
Change your code snippet from:
def holder = groovyUtils.getXmlHolder("GetByProNumber#Response")
To:
//Get the specified test step response and assign to variable
def stepResponse = context.expand('${GetByProNumber#Response}')
//Now pass the above variable to xmlholder to create the object
def holder = groovyUtils.getXmlHolder(stepResponse)

Resources