I am trying to find the face of a linked wall, but when I use that reference to create a new dimension I get an 'invalid number of references' error. I can't convert the link face reference into one that works in the active document.
public Result Execute(ExternalCommandData commandData, ref string message, ElementSet elements)
{
    UIDocument uidoc = commandData.Application.ActiveUIDocument;
    Document doc = uidoc.Document;

    var rf1 = uidoc.Selection.PickObject(ObjectType.PointOnElement, "select");
    var element1 = doc.GetElement(rf1);
    var location = element1.Location as LocationPoint;
    var point = location.Point;

    var rf2 = uidoc.Selection.PickObject(ObjectType.LinkedElement, "select");
    var linkElement = doc.GetElement(rf2) as RevitLinkInstance;
    var linkDoc = linkElement.GetLinkDocument();
    var linkWall = linkDoc.GetElement(rf2.LinkedElementId) as Wall;
    var wallLocation = linkWall.Location as LocationCurve;
    var curve = wallLocation.Curve;
    var cRf = curve.Reference;

    var solid = BIMTools.Geometry.GetSolid(linkWall);
    Face face = null;
    foreach (var solidFace in solid.Faces)
    {
        XYZ normal = ((Face)solidFace).ComputeNormal(new UV(0, 0));
        if (normal.Y < 0)
        {
            face = solidFace as Face;
            break;
        }
    }

    var viewLevel = uidoc.ActiveView.GenLevel.Elevation;
    var tPoint = new XYZ(point.X, (face as PlanarFace).Origin.Y, viewLevel);
    point = new XYZ(point.X, point.Y, viewLevel);
    var line = Line.CreateBound(point, tPoint);

    var references = new ReferenceArray();
    references.Append(rf1);
    references.Append(face.Reference);

    using (Transaction trans = new Transaction(doc, "create"))
    {
        trans.Start();
        var dimension = doc.Create.NewDimension(uidoc.ActiveView, line, references);
        trans.Commit();
    }
    return Result.Succeeded;
}
The Building Coder provides a whole list of articles on creating dimensioning.
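The 'invalid number of references' error usually means one of the appended references cannot be resolved in the host document: face.Reference here belongs to the link document, and may even be null if the geometry was extracted without ComputeReferences. A minimal, untested sketch of the usual workaround, reusing linkWall, linkElement and rf1 from the code above, is to take the face reference from the linked wall's own geometry and map it into the host document with Reference.CreateLinkReference:

// Sketch only: get the linked wall's geometry with references enabled,
// pick the face, and convert its reference for use in the host document.
Options opt = new Options { ComputeReferences = true };
Face linkedFace = null;
foreach (GeometryObject geomObj in linkWall.get_Geometry(opt))
{
    Solid geomSolid = geomObj as Solid;
    if (geomSolid == null) continue;
    foreach (Face f in geomSolid.Faces)
    {
        if (f.ComputeNormal(new UV(0, 0)).Y < 0)
        {
            linkedFace = f;
            break;
        }
    }
    if (linkedFace != null) break;
}

// linkedFace.Reference lives in the link document; CreateLinkReference maps it
// into the host document so NewDimension receives two references it can resolve.
Reference hostFaceRef = linkedFace.Reference.CreateLinkReference(linkElement);

var references = new ReferenceArray();
references.Append(rf1);          // reference picked in the host document
references.Append(hostFaceRef);  // linked face reference mapped to the host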
I'm trying to make a script that will go through the swatches list and search for a swatch named "Line".
If it is not found, I want to add it; if it already exists, I want the script to do nothing.
This is what I have so far. The problem is that if it finds a swatch named "Line", it still creates a new pink color for some reason.
var doc = app.activeDocument;
for (i = 0; i < doc.swatches.length; i++)
{
    if (doc.swatches[i].name != "Line")
    {
        var newSpot = doc.spots.add();
        var newColor = new CMYKColor();
        newColor.cyan = 100;
        newColor.magenta = 100;
        newColor.yellow = 100;
        newColor.black = 100;
        newSpot.name = "Line";
        newSpot.colorType = ColorModel.SPOT;
        newSpot.color = newColor;
    }
}
You can try to get the swatch by its name first and create a new color only if that lookup throws an error:
var doc = app.activeDocument;
try { var color = doc.swatches.getByName('Line') }
catch(e) {
    var newColor = new CMYKColor();
    newColor.cyan = 100;
    newColor.magenta = 100;
    newColor.yellow = 100;
    newColor.black = 100;
    var newSpot = doc.spots.add();
    newSpot.name = 'Line';
    newSpot.colorType = ColorModel.SPOT;
    newSpot.color = newColor;
}
I'm using the SuiteTalk (API) service for NetSuite to retrieve a list of Assemblies. I need to load the InventoryDetails fields on the results to view the serial/lot numbers assigned to the items. This is the current code that I'm using, but the results still show those fields coming back as NULL, although I can see the other fields for the AssemblyBuild object. How do I get the inventory details (serial/lot numbers) to return on a transaction search?
public static List<AssemblyBuildResult> Get()
{
    var listAssemblyBuilds = new List<AssemblyBuildResult>();
    var service = Service.Context();
    var ts = new TransactionSearch();
    var tsb = new TransactionSearchBasic();
    var sfType = new SearchEnumMultiSelectField
    {
        @operator = SearchEnumMultiSelectFieldOperator.anyOf,
        operatorSpecified = true,
        searchValue = new string[] { "_assemblyBuild" }
    };
    tsb.type = sfType;
    ts.basic = tsb;
    ts.inventoryDetailJoin = new InventoryDetailSearchBasic();

    // Perform the search
    var response = service.search(ts);
    response.pageSizeSpecified = true;

    // Process response
    if (response.status.isSuccess)
    {
        // Process the records returned in the response
        // Get more records with pagination
        if (response.totalRecords > 0)
        {
            for (var x = 1; x <= response.totalPages; x++)
            {
                var records = response.recordList;
                foreach (var t in records)
                {
                    var ab = (AssemblyBuild) t;
                    listAssemblyBuilds.Add(GetAssemblyBuildsResult(ab));
                }
                if (response.pageIndex < response.totalPages)
                {
                    response = service.searchMoreWithId(response.searchId, x + 1);
                }
            }
        }
    }
    // Parse and return NetSuite WorkOrder into assembly WorkOrderResult list
    return listAssemblyBuilds;
}
After much pain and suffering, I was able to solve this problem with the following code:
/// <summary>
/// Returns List of AssemblyBuilds from NetSuite
/// </summary>
/// <returns></returns>
public static List<AssemblyBuildResult> Get(string id = "", bool getDetails = false)
{
    // Object to populate and return results
    var listAssemblyBuilds = new List<AssemblyBuildResult>();

    // Initiate Service and SavedSearch (TransactionSearchAdvanced)
    var service = Service.Context();
    var tsa = new TransactionSearchAdvanced
    {
        savedSearchScriptId = "customsearch_web_assemblysearchmainlist"
    };

    // Filter by ID if specified
    if (id != "")
    {
        tsa.criteria = new TransactionSearch()
        {
            basic = new TransactionSearchBasic()
            {
                internalId = new SearchMultiSelectField
                {
                    @operator = SearchMultiSelectFieldOperator.anyOf,
                    operatorSpecified = true,
                    searchValue = new[] {
                        new RecordRef() {
                            type = RecordType.assemblyBuild,
                            typeSpecified = true,
                            internalId = id
                        }
                    }
                }
            }
        };
    }

    // Construct custom columns to return
    var tsr = new TransactionSearchRow();
    var tsrb = new TransactionSearchRowBasic();

    var orderIdCols = new SearchColumnSelectField[1];
    var orderIdCol = new SearchColumnSelectField();
    orderIdCols[0] = orderIdCol;
    tsrb.internalId = orderIdCols;

    var tranDateCols = new SearchColumnDateField[1];
    var tranDateCol = new SearchColumnDateField();
    tranDateCols[0] = tranDateCol;
    tsrb.tranDate = tranDateCols;

    var serialNumberCols = new SearchColumnStringField[1];
    var serialNumberCol = new SearchColumnStringField();
    serialNumberCols[0] = serialNumberCol;
    tsrb.serialNumbers = serialNumberCols;

    // Perform the Search
    tsr.basic = tsrb;
    tsa.columns = tsr;
    var response = service.search(tsa);

    // Process response
    if (response.status.isSuccess)
    {
        var searchRows = response.searchRowList;
        if (searchRows != null && searchRows.Length >= 1)
        {
            foreach (SearchRow t in searchRows)
            {
                var transactionRow = (TransactionSearchRow)t;
                listAssemblyBuilds.Add(GetAssemblyBuildsResult(transactionRow, getDetails));
            }
        }
    }

    // Parse and return NetSuite WorkOrder into assembly WorkOrderResult list
    return listAssemblyBuilds;
}
private static string GetAssemblyBuildLotNumbers(string id)
{
    var service = Service.Context();
    var serialNumbers = "";
    var tsa = new TransactionSearchAdvanced
    {
        savedSearchScriptId = "customsearch_web_assemblysearchlineitems"
    };
    service.searchPreferences = new SearchPreferences { bodyFieldsOnly = false };
    tsa.criteria = new TransactionSearch()
    {
        basic = new TransactionSearchBasic()
        {
            internalId = new SearchMultiSelectField
            {
                @operator = SearchMultiSelectFieldOperator.anyOf,
                operatorSpecified = true,
                searchValue = new[] {
                    new RecordRef() {
                        type = RecordType.assemblyBuild,
                        typeSpecified = true,
                        internalId = id
                    }
                }
            }
        }
    };

    // Construct custom columns to return
    var tsr = new TransactionSearchRow();
    var tsrb = new TransactionSearchRowBasic();

    var orderIdCols = new SearchColumnSelectField[1];
    var orderIdCol = new SearchColumnSelectField();
    orderIdCols[0] = orderIdCol;
    tsrb.internalId = orderIdCols;

    var serialNumberCols = new SearchColumnStringField[1];
    var serialNumberCol = new SearchColumnStringField();
    serialNumberCols[0] = serialNumberCol;
    tsrb.serialNumbers = serialNumberCols;

    tsr.basic = tsrb;
    tsa.columns = tsr;
    var response = service.search(tsa);

    if (response.status.isSuccess)
    {
        var searchRows = response.searchRowList;
        if (searchRows != null && searchRows.Length >= 1)
        {
            foreach (SearchRow t in searchRows)
            {
                var transactionRow = (TransactionSearchRow)t;
                if (transactionRow.basic.serialNumbers != null)
                {
                    return transactionRow.basic.serialNumbers[0].searchValue;
                }
            }
        }
    }
    return serialNumbers;
}
private static AssemblyBuildResult GetAssemblyBuildsResult(TransactionSearchRow tsr, bool getDetails)
{
    if (tsr != null)
    {
        var assemblyInfo = new AssemblyBuildResult
        {
            NetSuiteId = tsr.basic.internalId[0].searchValue.internalId,
            ManufacturedDate = tsr.basic.tranDate[0].searchValue,
            SerialNumbers = tsr.basic.serialNumbers[0].searchValue
        };

        // If selected, this will do additional NetSuite queries to get detailed data (slower)
        if (getDetails)
        {
            // Look up Lot Number
            assemblyInfo.LotNumber = GetAssemblyBuildLotNumbers(tsr.basic.internalId[0].searchValue.internalId);
        }
        return assemblyInfo;
    }
    return null;
}
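For reference, a hypothetical call site for the helpers above might look like this (the internal ID value is just a placeholder):

// Hypothetical usage: fetch one assembly build by internal ID and include
// the slower lot-number lookup.
var builds = Get("12345", getDetails: true);
foreach (var build in builds)
{
    Console.WriteLine("{0} {1} {2} {3}",
        build.NetSuiteId, build.ManufacturedDate, build.SerialNumbers, build.LotNumber);
}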
What I learned about pulling data from NetSuite:
Using SavedSearches is the best method to pull data that doesn't automatically come through in the API objects
It is barely supported
Don't specify an ID on the SavedSearch; instead, specify criteria in the TransactionSearch to get one record
You will need to specify which columns to actually pull down. NetSuite doesn't just send you the data from a SavedSearch automatically
You cannot view data in a SavedSearch that contains a Grouping
In the Saved Search, use the criterion Main Line = true/false to read data from the main record (top of the UI screen) or from the line items (bottom of the screen)
I have a website hosted on Azure that includes a Web API, which I'm using to develop an Android app. I'm trying to upload a media file to the server, where it's encoded by a media encoder and saved to a path. The encoder library is called "Media Toolkit", which I found here: https://www.nuget.org/packages/MediaToolkit/1.0.0.3
My server-side code looks like this:
[HttpPost]
[Route("upload")]
public async Task<HttpResponseMessage> Upload(uploadFileModel model)
{
    var result = new HttpResponseMessage(HttpStatusCode.OK);
    if (ModelState.IsValid)
    {
        string thumbname = "";
        string resizedthumbname = Guid.NewGuid() + "_yt.jpg";
        string FfmpegPath = Encoding_Settings.FFMPEGPATH;
        string tempFilePath = Path.Combine(HttpContext.Current.Server.MapPath("~/video"), model.fileName);
        string pathToFiles = HttpContext.Current.Server.MapPath("~/video");
        string pathToThumbs = HttpContext.Current.Server.MapPath("~/contents/member/" + model.username + "/thumbs");
        string finalPath = HttpContext.Current.Server.MapPath("~/contents/member/" + model.username + "/flv");
        string resizedthumb = Path.Combine(pathToThumbs, resizedthumbname);

        var outputPathVid = new MediaFile { Filename = Path.Combine(finalPath, model.fileName) };
        var inputPathVid = new MediaFile { Filename = Path.Combine(pathToFiles, model.fileName) };

        int maxWidth = 380;
        int maxHeight = 360;

        var namewithoutext = Path.GetFileNameWithoutExtension(Path.Combine(pathToFiles, model.fileName));
        thumbname = model.VideoThumbName;
        string oldthumbpath = Path.Combine(pathToThumbs, thumbname);
        var fileName = model.fileName;

        try
        {
            File.WriteAllBytes(tempFilePath, model.data);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }

        using (var engine = new Engine())
        {
            engine.GetMetadata(inputPathVid);
            // Saves the frame located on the 15th second of the video.
            var outputPathThumb = new MediaFile { Filename = Path.Combine(pathToThumbs, thumbname + ".jpg") };
            var options = new ConversionOptions { Seek = TimeSpan.FromSeconds(0), CustomHeight = 360, CustomWidth = 380 };
            engine.GetThumbnail(inputPathVid, outputPathThumb, options);
        }

        Image image = Image.FromFile(Path.Combine(pathToThumbs, thumbname + ".jpg"));
        //var ratioX = (double)maxWidth / image.Width;
        //var ratioY = (double)maxHeight / image.Height;
        //var ratio = Math.Min(ratioX, ratioY);
        var newWidth = (int)(maxWidth);
        var newHeight = (int)(maxHeight);
        var newImage = new Bitmap(newWidth, newHeight);
        Graphics.FromImage(newImage).DrawImage(image, 0, 0, newWidth, newHeight);
        Bitmap bmp = new Bitmap(newImage);
        bmp.Save(Path.Combine(pathToThumbs, thumbname + "_resized.jpg"));
        //File.Delete(Path.Combine(pathToThumbs, thumbname));

        using (var engine = new Engine())
        {
            var conversionOptions = new ConversionOptions
            {
                VideoSize = VideoSize.Hd720,
                AudioSampleRate = AudioSampleRate.Hz44100,
                VideoAspectRatio = VideoAspectRatio.Default
            };
            engine.GetMetadata(inputPathVid);
            engine.Convert(inputPathVid, outputPathVid, conversionOptions);
        }

        File.Delete(tempFilePath);

        Video_Struct vd = new Video_Struct();
        vd.CategoryID = 0; // store categoryname or term instead of category id
        vd.Categories = "";
        vd.UserName = model.username;
        vd.Title = "";
        vd.Description = "";
        vd.Tags = "";
        vd.Duration = inputPathVid.Metadata.Duration.ToString();
        vd.Duration_Sec = Convert.ToInt32(inputPathVid.Metadata.Duration.Seconds.ToString());
        vd.OriginalVideoFileName = model.fileName;
        vd.VideoFileName = model.fileName;
        vd.ThumbFileName = thumbname + "_resized.jpg";
        vd.isPrivate = 0;
        vd.AuthKey = "";
        vd.isEnabled = 1;
        vd.Response_VideoID = 0; // video responses
        vd.isResponse = 0;
        vd.isPublished = 1;
        vd.isReviewed = 1;
        vd.Thumb_Url = "none";
        //vd.FLV_Url = flv_url;
        vd.Embed_Script = "";
        vd.isExternal = 0; // website own video, 1: embed video
        vd.Type = 0;
        vd.YoutubeID = "";
        vd.isTagsreViewed = 1;
        vd.Mode = 0; // filter videos based on website sections
        //vd.ContentLength = f_contentlength;
        vd.GalleryID = 0;
        long videoid = VideoBLL.Process_Info(vd, false);

        return result;
    }
    else
    {
        throw new HttpResponseException(Request.CreateResponse(HttpStatusCode.NotAcceptable, "This request is not properly formatted"));
    }
}
When the debugger hits the line using (var engine = new Engine()), a 500 Internal Server Error is thrown. I don't get this error when testing on my local IIS server. Since it works fine locally but not on the Azure-hosted server, I figure it has to do with the Azure service rather than an error in my code. If that is the case, how can I get around this issue? I don't want to use Azure Blob Storage, as it would require a lot of changes to my code. Does anyone have any idea what the issue might be?
Any helpful suggestions are appreciated.
Server.MapPath works differently on Azure WebApps - change to:
string pathToFiles = HttpContext.Current.Server.MapPath("~//video");
Also, see this SO post for another approach.
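If path resolution really is the difference, a quick diagnostic (a hedged sketch, not part of the original answer; it reuses model.username from the upload action) is to log what MapPath resolves to on Azure and make sure the target folders exist before MediaToolkit touches them:

// Diagnostic sketch: check the resolved physical paths before encoding.
string videoPath = HttpContext.Current.Server.MapPath("~/video");
string thumbsPath = HttpContext.Current.Server.MapPath("~/contents/member/" + model.username + "/thumbs");

System.Diagnostics.Trace.TraceInformation("video path: " + videoPath);
System.Diagnostics.Trace.TraceInformation("thumbs path: " + thumbsPath);

// On a fresh Azure deployment these folders may not exist yet, which makes the
// file writes (and the encoder) fail; create them up front.
System.IO.Directory.CreateDirectory(videoPath);
System.IO.Directory.CreateDirectory(thumbsPath);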
I am getting the values of an object's properties (top, width, ...) as the object is being scaled or moved, using these handlers:
canvas.on('object:scaling', onObjectModification);
canvas.on('object:moving', onObjectModification);
canvas.on('object:rotating', onObjectModification);

function onObjectModification(e) {
    var activeObject = e.target;
    var reachedLimit = false;
    objectLeft = activeObject.left,
    objectTop = activeObject.top,
    objectWidth = activeObject.width,
    objectHeight = activeObject.height,
    canvasWidth = canvas.width,
    canvasHeight = canvas.height;
}
How can I get the same values for each object that is being moved as part of a group? I need the values to update continuously, just as the object:scaling event provides.
I know of the selection:created event, but I'm not sure how to use it to achieve this. Any help would be highly appreciated.
Thanks
During object scaling, width and height will not change; they stay the same the whole time, and only scaleX and scaleY change.
You have to write a function that iterates over the group's sub-objects, if any.
canvas.on('object:scaling', onObjectModification);
canvas.on('object:moving', onObjectModification);
canvas.on('object:rotating', onObjectModification);

function onObjectModification(e) {
    var activeObject = e.target;
    if (!activeObject) {
        return;
    }
    var canvasWidth = canvas.width;
    var canvasHeight = canvas.height;
    var reachedLimit = false;
    var objectLeft = activeObject.left;
    var objectTop = activeObject.top;
    // getWidth()/getHeight() provide the scaled height and width
    var objectWidth = activeObject.getWidth();
    var objectHeight = activeObject.getHeight();
    if (activeObject._objects) {
        // The active selection is a group: walk its sub-objects and apply the
        // group's scale factors to each child's dimensions.
        var objs = activeObject._objects;
        for (var i = 0; i < objs.length; i++) {
            var obj = objs[i];
            var objWidth = obj.getWidth() * activeObject.scaleX;
            var objHeight = obj.getHeight() * activeObject.scaleY;
        }
    }
}
I need to offer a feature which allows InDesign users to select a page range in an InDesign document and create a new document out of those pages. This sounds simple, but it isn't...
I have tried many different ways of doing this but they have all failed to some degree. Some methods put all pages in a single spread (which sometimes makes InDesign crash). The best I've been able to do (see code below) still has problems at the beginning and the end (see screenshots below):
The original document:
The new document:
The question: How can I create a new document out of a subset of another document's pages (in InDesign using ExtendScript) without having the problems shown in the screenshots?
note: The behavior of the script is quite different in CS5.5 and CS6. My question concerns CS6.
The second screenshot was obtained by applying the following code to the document shown in the first screenshot:
CODE
var firstPageName = { editContents: "117" }; // This page number is actually entered by the user in an integerEditbox
var lastPageName = { editContents: "136" }; // This page number is actually entered by the user in an integerEditbox

var sourceDocument = app.activeDocument;
var destDocument = app.documents.add();

destDocument.importStyles(ImportFormat.paragraphStylesFormat, new File(sourceDocument.filePath + "/" + sourceDocument.name), GlobalClashResolutionStrategy.LOAD_ALL_WITH_OVERWRITE);
destDocument.importStyles(ImportFormat.characterStylesFormat, new File(sourceDocument.filePath + "/" + sourceDocument.name), GlobalClashResolutionStrategy.LOAD_ALL_WITH_OVERWRITE);

destDocument.viewPreferences.horizontalMeasurementUnits = sourceDocument.viewPreferences.horizontalMeasurementUnits;
destDocument.viewPreferences.verticalMeasurementUnits = sourceDocument.viewPreferences.verticalMeasurementUnits;
destDocument.documentPreferences.facingPages = sourceDocument.documentPreferences.facingPages;
destDocument.documentPreferences.pageHeight = sourceDocument.documentPreferences.pageHeight;
destDocument.documentPreferences.pageWidth = sourceDocument.documentPreferences.pageWidth;
destDocument.documentPreferences.pageSize = sourceDocument.documentPreferences.pageSize;

var sourceSpreads = sourceDocument.spreads;
var nbSourceSpreads = sourceSpreads.length;
var firstPageFound = false;
var lastPageFound = false;
var i;
var newSpreadNeeded;
var currentDestSpread;

for (i = 0; !lastPageFound, i < nbSourceSpreads; ++i) {
    newSpreadNeeded = true;
    var sourcePages = sourceSpreads[i].pages;
    var nbSourcePages = sourcePages.length;
    var j;
    for (j = 0; !lastPageFound, j < nbSourcePages; ++j) {
        if (sourcePages[j].name === firstPageName.editContents) {
            firstPageFound = true;
            destDocument.documentPreferences.startPageNumber = parseInt(firstPageName.editContents); // We want to preserve page numbers
        }
        if (firstPageFound) {
            // Copy this page over to the new document.
            var firstInNewSpread = false;
            if (newSpreadNeeded) {
                currentDestSpread = destDocument.spreads.add();
                newSpreadNeeded = false;
                firstInNewSpread = true;
            }
            var newPage = sourcePages[j].duplicate(LocationOptions.AT_END, currentDestSpread);
            var k;
            for (k = 0; k < newPage.index; ++k) {
                currentDestSpread.pages[k].remove();
            }
        }
        if (sourcePages[j].name === lastPageName.editContents) {
            lastPageFound = true;
        }
    }
}
destDocument.spreads[0].remove();
I was hacking around and came up with this little script. Although it approaches the problem from the opposite direction, it seems to work fine here. Also, I'm still running InDesign CS5, but maybe it will work for you. Hopefully I got the gist of your question?
This will extract pages 3 through 5 into a separate document:
var doc = app.activeDocument;
var newFilePath = doc.filePath + "/subset_" + doc.name;
var newFile = File(newFilePath); // Create a new file path
doc.saveACopy(newFile); // Save a copy of the doc
var newDoc = app.open(newFile); // Open the copy

var firstPageNum = 3; // First page number in the range
var lastPageNum = 5; // Last page number in the range

var firstPage = newDoc.pages[firstPageNum - 1];
var lastPage = newDoc.pages[lastPageNum - 1];

// Remove all text from the last page in the range to the end of the document
var lastPageFrames = lastPage.textFrames.everyItem().getElements();
for (var i = 0; i < lastPageFrames.length; i++) {
    var frame = lastPageFrames[i];
    var parentStory = frame.parentStory;
    var lastFrameInsert = frame.insertionPoints.lastItem();
    var lastStoryInsert = parentStory.insertionPoints.lastItem();
    var textAfter = parentStory.insertionPoints.itemByRange(lastFrameInsert, lastStoryInsert);
    textAfter.remove();
};

// Remove all text from the beginning of the document to the first page in the range
var firstPageFrames = firstPage.textFrames.everyItem().getElements();
for (var i = 0; i < firstPageFrames.length; i++) {
    var frame = firstPageFrames[i];
    var parentStory = frame.parentStory;
    var firstFrameInsert = frame.insertionPoints.firstItem();
    var textBefore = parentStory.insertionPoints.itemByRange(0, firstFrameInsert.index);
    textBefore.remove();
};

// Remove the pages that aren't in the range
// (page numbers are 1-based, array indices are 0-based)
var allPages = newDoc.pages.everyItem().getElements();
for (var i = 0; i < allPages.length; i++) {
    var page = allPages[i];
    if (i < firstPageNum - 1 || i > lastPageNum - 1) {
        page.remove();
    }
};