SP.NavigationNode.get_isVisible() broken?

I need to read the "Top Nav", the "Children Nodes" and check if each node is visible.
I am using JSOM to accomplish this. Everything works fine except the get_isVisible() function, which always returns true. MSDN: http://msdn.microsoft.com/en-us/library/office/jj246297.aspx
I am on a SharePoint 2013 publishing site and I know some of the items are hidden. (My web and context are defined outside of this snippet.)
var visParents = [], visChildren = [];
var topNodes = web.get_navigation().get_topNavigationBar();
context.load(topNodes);
context.executeQueryAsync(onQuerySucceeded, onQueryFailed);

function onQuerySucceeded() {
    var nodeInfo = '';
    var nodeEnumerator = topNodes.getEnumerator();
    while (nodeEnumerator.moveNext()) {
        var node = nodeEnumerator.get_current();
        nodeInfo += node.get_title() + '\n';
        if (node.get_isVisible())
            visParents.push(node);
    }
    console.log("Current nodes: \n\n" + nodeInfo);
    console.log("Visible Parents", visParents);
}

function onQueryFailed(sender, args) {
    alert('Request failed. ' + args.get_message() + '\n' + args.get_stackTrace());
}

This is a known issue: the SP.NavigationNode.isVisible property does not correspond to the property that indicates whether a navigation node is hidden or shown.
Refer to the Hidden property of SPNavigationNode for details.
The following function demonstrates how to retrieve the URLs of hidden nodes:
function getGlobalNavigationExcludedUrls(Success, Error) {
    var context = new SP.ClientContext.get_current();
    var web = context.get_web();
    var subwebs = web.get_webs();
    var pagesList = web.get_lists().getByTitle("Pages");
    var pageItems = pagesList.getItems(SP.CamlQuery.createAllItemsQuery());
    var allProperties = web.get_allProperties();
    context.load(web);
    context.load(subwebs);
    context.load(allProperties);
    context.load(pageItems);
    context.executeQueryAsync(
        function () {
            var excludedIds = allProperties.get_item('__GlobalNavigationExcludes').split(';');
            var excludedUrls = [];
            for (var i = 0; i < excludedIds.length - 1; i++) {
                for (var j = 0; j < subwebs.get_count(); j++) {
                    var subweb = subwebs.getItemAtIndex(j);
                    if (subweb.get_id().toString() == excludedIds[i]) {
                        excludedUrls.push(subweb.get_serverRelativeUrl());
                        break;
                    }
                }
                for (var j = 0; j < pageItems.get_count(); j++) {
                    var pageItem = pageItems.getItemAtIndex(j);
                    if (pageItem.get_item('UniqueId').toString() == excludedIds[i]) {
                        excludedUrls.push(web.get_serverRelativeUrl() + pageItem.get_item('FileRef'));
                        break;
                    }
                }
            }
            Success(excludedUrls);
        },
        Error
    );
}
// Usage: print excluded node URLs
getGlobalNavigationExcludedUrls(function (excludedNodeUrls) {
    for (var j = 0; j < excludedNodeUrls.length; j++) {
        console.log(excludedNodeUrls[j]);
    }
},
function (sender, args) {
    console.log(args.get_message());
});
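If you need both pieces together, one way is to treat a top navigation node as visible only when its Url is not in the excluded list. This is an untested sketch that reuses the topNodes collection from the question (assumed to be already loaded) and the getGlobalNavigationExcludedUrls helper above; the exact URL formats returned for sub webs and pages may need normalizing before comparing:

getGlobalNavigationExcludedUrls(function (excludedUrls) {
    var visParents = [];
    var nodeEnumerator = topNodes.getEnumerator();
    while (nodeEnumerator.moveNext()) {
        var node = nodeEnumerator.get_current();
        // get_url() is server-relative for internal nodes; compare it against the excluded list.
        if (excludedUrls.indexOf(node.get_url()) === -1) {
            visParents.push(node);
        }
    }
    console.log("Visible parents", visParents);
}, function (sender, args) {
    console.log('Request failed. ' + args.get_message());
});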

Related

TypeError: .push is not a function

I'm trying to split a string into args and subargs and return it in an object, but then I'm getting the error above.
Here is my code:
function subargs(subargs) {
    var argargs = {};
    for (var i = 0; i <= subargs.length; i++) {
        if (subargs[i].indexOf(':') > -1) {
            var arg = subargs[i].slice(0, subargs[i].indexOf(':'))
            subargs[i].slice(subargs[i].indexOf(':') + 1, subargs[i].length)
            argargs.push(`["arg":${arg},"subarg":${subargs}]`);
            console.log(arg);
            console.log(subargs);
        }
    };
    return argargs;
}
var argargs = {}; so it's an object, not an array. You can only .push to an array. – CertainPerformance
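A minimal sketch of that fix: collect plain {arg, subarg} objects in an array, which does have .push (the parseSubargs name and the example input are just for illustration):

function parseSubargs(subargs) {
    var result = [];                                // an array, so .push is available
    for (var i = 0; i < subargs.length; i++) {      // < rather than <=, to stay in bounds
        var sep = subargs[i].indexOf(':');
        if (sep > -1) {
            result.push({
                arg: subargs[i].slice(0, sep),
                subarg: subargs[i].slice(sep + 1)
            });
        }
    }
    return result;
}

// parseSubargs(["width:100", "height:50"])
// -> [{ arg: "width", subarg: "100" }, { arg: "height", subarg: "50" }]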

How to verify a document from QLDB in Node.js?

I'm trying to verify a document from QLDB using Node.js. I have been following the Java verification example as closely as I can, but I'm unable to calculate the same digest as the one stored on the ledger.
This is the code I have come up with. I query the proof and block hash from QLDB and then try to calculate the digest the same way the Java example does. But after concatenating the two hashes and hashing the result, I get the wrong output from crypto.createHash('sha256').update(c).digest("hex"). I have also tried "base64" instead of "hex", with a different but still wrong result.
const rBlock = makeReader(res.Revision.IonText);
var block = [];
rBlock.next();
rBlock.stepIn();
rBlock.next();
while (rBlock.next() != null) {
    if (rBlock.fieldName() == 'hash') {
        block.push(Buffer.from(rBlock.byteValue()).toString('hex'));
    }
}
console.log(block);

var proof = [];
const rProof = makeReader(res.Proof.IonText);
rProof.next();
rProof.stepIn();
while (rProof.next() != null) {
    proof.push(Buffer.from(rProof.byteValue()).toString('hex'));
}

var ph = block[0];
var c;
for (var i = 0; i < proof.length; i++) {
    console.log(proof[i])
    for (var j = 0; j < ph.length; j++) {
        if (parseInt(ph[j]) > parseInt(proof[i][j])) {
            c = ph + proof[i];
            break;
        }
        if (parseInt(ph[j]) < parseInt(proof[i][j])) {
            c = proof[i] + ph;
            break;
        }
    }
    ph = crypto.createHash('sha256').update(c).digest("hex");
    console.log(ph);
    console.log();
}
I have figured it out. The problem was that I was converting the blobs to hex strings and hashing those instead of the raw values. For anyone wanting to verify data in Node, here is the bare solution:
// Assumes makeReader comes from the ion-js package and crypto is Node's built-in module,
// e.g. const { makeReader } = require('ion-js'); const crypto = require('crypto');
ledgerInfo.getRevision(params).then(res => {
    console.log(res);

    // Read the document hash from the revision as raw bytes.
    const rBlock = makeReader(res.Revision.IonText);
    var ph;
    rBlock.next();
    rBlock.stepIn();
    rBlock.next();
    while (rBlock.next() != null) {
        if (rBlock.fieldName() == 'hash') {
            ph = rBlock.byteValue()
        }
    }

    // Read the proof hashes, also as raw bytes.
    var proof = [];
    const rProof = makeReader(res.Proof.IonText);
    rProof.next();
    rProof.stepIn();
    while (rProof.next() != null) {
        proof.push(rProof.byteValue());
    }

    // Fold the proof into the document hash, ordering each pair before hashing.
    for (var i = 0; i < proof.length; i++) {
        var c;
        if (hashComparator(ph, proof[i]) < 0) {
            c = concatTypedArrays(ph, proof[i]);
        } else {
            c = concatTypedArrays(proof[i], ph);
        }
        var buff = crypto.createHash('sha256').update(c).digest("hex");
        ph = Uint8Array.from(Buffer.from(buff, 'hex'));
    }
    console.log(Buffer.from(ph).toString('base64'));
}).catch(err => {
    console.log(err, err.stack)
});

function concatTypedArrays(a, b) {
    var c = new (a.constructor)(a.length + b.length);
    c.set(a, 0);
    c.set(b, a.length);
    return c;
}

// Signed byte-wise comparison from the last byte down, matching the Java sample.
function hashComparator(h1, h2) {
    for (var i = h1.length - 1; i >= 0; i--) {
        var diff = (h1[i] << 24 >> 24) - (h2[i] << 24 >> 24);
        if (diff != 0)
            return diff;
    }
    return 0;
}
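Once ph has been folded through the proof, the last step is to compare it with the ledger digest, for example the Digest bytes returned by QLDB's getDigest API. A minimal check, assuming digestBytes already holds that value as raw bytes:

// digestBytes is assumed to be the ledger digest as a Buffer or Uint8Array (e.g. from getDigest).
var verified = Buffer.compare(Buffer.from(ph), Buffer.from(digestBytes)) === 0;
console.log(verified ? 'Revision hash verified against the digest' : 'Digest mismatch');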

Instagram Auto-Like JavaScript BOT

This code brings back the following error:
Uncaught TypeError: Cannot read property 'innerHTML' of null
at doLike (<anonymous>:20:21)
at <anonymous>:35:1
doLike # VM1269:20
(anonymous) # VM1269:35
It has worked in the past. I got it from this website: https://blog.joeldare.com/simple-instagram-like-bot/
function getHeartElement() {
    var knownHeartElementNames = ["coreSpriteHeartOpen", "coreSpriteLikeHeartOpen"];
    var i = 0;
    // Loop through the known heart elements until one works
    for (i = 0; i < knownHeartElementNames.length; i++) {
        var heartElement = document.querySelector("." + knownHeartElementNames[i]);
        if (heartElement != undefined) {
            break;
        }
    }
    return heartElement;
}

function doLike() {
    var likeMax = 100;
    var likeElement = getHeartElement();
    var nextElement = document.querySelector(".coreSpriteRightPaginationArrow");
    likeCount++;
    var nextTime = Math.random() * (14000 - 4000) + 4000;
    if (likeElement.innerHTML.match("Unlike") == null) {
        likeElement.click();
        console.log(likeCount + " - liked");
    } else {
        console.log(likeCount + " - skipped");
    }
    setTimeout(function() { nextElement.click(); }, 1000);
    if (likeCount < likeMax) {
        setTimeout(doLike, nextTime);
    } else {
        console.log("Nice! Time for a break.");
    }
}

var likeCount = 0;
doLike();
You may want to use a tool such as Keygram - https://www.thekeygram.com
It works really well for me to gain followers.
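For what it's worth, the stack trace points at the actual failure: getHeartElement() finds none of the class names in knownHeartElementNames (most likely because Instagram's markup has changed since the script was written), so likeElement is null when doLike() reads likeElement.innerHTML. A defensive sketch that keeps the original structure but bails out instead of throwing:

function doLike() {
    var likeMax = 100;
    var likeElement = getHeartElement();
    var nextElement = document.querySelector(".coreSpriteRightPaginationArrow");
    // Bail out if the selectors no longer match the current Instagram markup.
    if (!likeElement || !nextElement) {
        console.log("Heart or pagination element not found - the class names need updating.");
        return;
    }
    likeCount++;
    var nextTime = Math.random() * (14000 - 4000) + 4000;
    if (likeElement.innerHTML.match("Unlike") == null) {
        likeElement.click();
        console.log(likeCount + " - liked");
    } else {
        console.log(likeCount + " - skipped");
    }
    setTimeout(function() { nextElement.click(); }, 1000);
    if (likeCount < likeMax) {
        setTimeout(doLike, nextTime);
    } else {
        console.log("Nice! Time for a break.");
    }
}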

Recursively get documents in Azure Cosmos Stored Procedure

I'm trying to generate a content tree for a simple wiki. Each page has a Children property that stores the ids of other wiki pages. I'm trying to write a SPROC that gets all of the documents, then iterates over each page's Children property and replaces each id with the actual wiki page document. I'm able to get the first set of documents, but the wikiChildQuery returns undefined and I'm not quite sure why.
I've been able to get a document with the query alone, but for some reason it doesn't work within the SPROC. Is there something I'm missing here?
function GetWikiMetadata(prefix) {
    var context = getContext();
    var collection = context.getCollection();
    var metadataQuery = 'SELECT {\"ExternalId\": p.ExternalId, \"Title\": p.Title, \"Slug\": p.Slug, \"Children\": p.Children} FROM Pages p'
    var metadata = collection.queryDocuments(collection.getSelfLink(), metadataQuery, {}, function (err, documents, options) {
        if (err) throw new Error('Error: ', + err.message);
        if (!documents || !documents.length) {
            throw new Error('Unable to find any documents');
        } else {
            var response = getContext().getResponse();
            for (var i = 0; i < documents.length; i++) {
                var children = documents[i]['$1'].Children;
                if (children.length) {
                    for (var j = 0; j < children.length; j++) {
                        var child = children[j];
                        children[j] = GetWikiChildren(child);
                    }
                }
            }
            response.setBody(documents);
        }
    });
    if (!metadata) throw new Error('Unable to get metadata from the server');

    function GetWikiChildren(child) {
        var wikiChildQuery = metadataQuery + ' WHERE p.ExternalId = \"' + child + '\"';
        var wikiChild = collection.queryDocuments(collection.getSelfLink(), wikiChildQuery, {}, function (err, document, options) {
            if (err) throw new Error('Error: ', + err.message);
            if (!document) {
                throw new Error('Unable to find child Wiki');
            } else {
                var children = document.Children;
                if (children) {
                    for (var k = 0; k < children.length; k++) {
                        var child = children[k];
                        children[k] = GetWikiChildren(child);
                    }
                } else {
                    return document;
                }
            }
            if (!wikChild) throw new Error('Unable to get child Wiki details');
        });
    }
}
1. "I'm able to get the first set of documents, but the wikiChildQuery returns undefined and I'm not quite sure why."
This is the first thing to correct. In the first loop you read the children array with documents[i]['$1'].Children, yet in the GetWikiChildren function you try to read it with document.Children, which is undefined: the query projects into a $1 wrapper and the callback receives an array, so you need var children = document[0]['$1'].Children;
2. It also seems that you missed the replaceDocument method.
You could adapt the snippet in your metadataQuery callback:
for (var i = 0; i < documents.length; i++) {
    var children = documents[i]['$1'].Children;
    if (children.length) {
        for (var j = 0; j < children.length; j++) {
            var child = children[j];
            children[j] = GetWikiChildren(child);
        }
    }
    documents[i]['$1'].Children = children;
    collection.replaceDocument(documents[i]._self, documents[i], function (err) {
        if (err) throw err;
    });
}
3. Partial updates are not supported by the Cosmos DB SQL API so far, although they are on the roadmap (see the feedback item).
So if your SQL projection doesn't cover all of the document's properties, it can't be used for the replace: any properties that are not present in the replacement object will be lost when you call replaceDocument.
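Beyond those points, collection.queryDocuments is asynchronous, so GetWikiChildren returns before its callback has run (the return document inside the callback never reaches the caller), which is another reason children[j] ends up undefined. One way to sidestep the nested queries entirely is to load the pages once, index them by ExternalId, and resolve the children in memory before setting the response. The following is an untested sketch under those assumptions, using a flat projection so there is no $1 wrapper, and ignoring continuation tokens for brevity:

function GetWikiMetadata(prefix) {
    var collection = getContext().getCollection();
    var response = getContext().getResponse();
    var query = 'SELECT p.ExternalId, p.Title, p.Slug, p.Children FROM Pages p';
    var accepted = collection.queryDocuments(collection.getSelfLink(), query, {},
        function (err, pages) {
            if (err) throw new Error('Error: ' + err.message);
            if (!pages || !pages.length) throw new Error('Unable to find any documents');
            // Index every page by its ExternalId so children can be resolved without more queries.
            var byId = {};
            for (var i = 0; i < pages.length; i++) {
                byId[pages[i].ExternalId] = pages[i];
            }
            // Replace each child id with the corresponding page object
            // (assumes the pages form a tree; cycles would break serialization).
            for (var k = 0; k < pages.length; k++) {
                var children = pages[k].Children || [];
                for (var j = 0; j < children.length; j++) {
                    children[j] = byId[children[j]] || children[j];
                }
            }
            response.setBody(pages);
        });
    if (!accepted) throw new Error('Unable to get metadata from the server');
}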

Opening tabs in Chrome news reader extension

I'm trying to create a simple Chrome extension using the following Google RSS reader sample:
http://code.google.com/chrome/extensions/samples.html#597015d3bcce3da693b02314afd607bec4f55291
I can add links in the pop-up window that open tabs, but not from the feeds themselves.
Looping through the items in the feed and grabbing the title and link tags, I want each title to link to the appropriate site:
var entries = doc.getElementsByTagName('item');
var count = Math.min(entries.length, maxFeedItems);
for (var i = 0; i < count; i++) {
    item = entries.item(i);
    // Grab the title for the feed item.
    var itemTitle = item.getElementsByTagName('title')[0];
    if (itemTitle) {
        itemTitle = itemTitle.textContent;
    } else {
        itemTitle = "Unknown title";
    }
    // Grab the link for this feed item
    var itemLink = item.getElementsByTagName('link')[0];
    if (itemLink) {
        itemLink = itemLink.textContent;
    } else {
        itemLink = "Unknown link";
    }
    var title = document.createElement("a");
    title.className = "item_title";
    title.innerText = itemTitle; // display title in iframe
    title.addEventListener("click", titleLink); // should open link when clicking on title, but does not.
}

// -------------------------------------------------------------------
// Show |url| in a new tab.
function showUrl(url) {
    // Only allow http and https URLs.
    if (url.indexOf("http:") != 0 && url.indexOf("https:") != 0) {
        return;
    }
    chrome.tabs.create({url: url});
}

function moreStories(event) {
    showUrl(moreStoriesUrl);
}

function titleLink(event) {
    showUrl(itemLink);
}
Any thoughts on why this is not working?
If I replace title.addEventListener("click", titleLink); with title.addEventListener("click", moreStories); each title links to moreStories, but I cannot get each title to link to its itemLink.
Thanks
It's a bit hard to answer your question without the whole code, but I'll give it a shot ;)
First up, titleLink() isn't going to work because itemLink isn't known there. When you create title (the link) you should attach the URL to it, say title.href = itemLink; then in titleLink you can read that href back with showUrl(event.currentTarget.href).
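In other words, a short sketch of that change using the variables from the question:

title.href = itemLink;                      // store the link on the element itself
title.addEventListener("click", titleLink);

function titleLink(event) {
    showUrl(event.currentTarget.href);      // read it back from the element that was clicked
}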
Also, did you fix the error in that example? If not, change frameLoaded to:
function frameLoaded() {
    var links = document.getElementsByTagName("A");
    for (i = 0; i < links.length; i++) {
        var clssName = links[i].className;
        if (clssName != "item_title" && clssName != "open_box") {
            links[i].addEventListener("click", showStory);
        }
    }
    window.addEventListener("message", messageHandler);
}
If you still have problems, could you attach the whole code so I can see what you're doing and I'll give you a hand.
Thank you very much for your help.
title.href = itemLink and showUrl(event.currentTarget.href) were exactly what I needed.
For completeness, here is the full code:
<script id="iframe_script">
function reportHeight() {
    var msg = JSON.stringify({type: "size", size: document.body.offsetHeight});
    parent.postMessage(msg, "*");
}

function frameLoaded() {
    var links = document.getElementsByTagName("A");
    for (i = 0; i < links.length; i++) {
        // "class" is a reserved word, so use a different variable name.
        var clssName = links[i].className;
        if (clssName != "item_title" && clssName != "open_box") {
            links[i].addEventListener("click", showStory);
        }
    }
    window.addEventListener("message", messageHandler);
}

function showStory(event) {
    var href = event.currentTarget.href;
    parent.postMessage(JSON.stringify({type: "show", url: href}), "*");
    event.preventDefault();
}

function messageHandler(event) {
    reportHeight();
}
</script>
<script>
// Feed URL.
var feedUrl = 'http://localhost/newsfeed.xml';

// The XMLHttpRequest object that tries to load and parse the feed.
var req;

function main() {
    req = new XMLHttpRequest();
    req.onload = handleResponse;
    req.onerror = handleError;
    req.open("GET", feedUrl, true);
    req.send(null);
}

// Handles feed parsing errors.
function handleFeedParsingFailed(error) {
    var feed = document.getElementById("feed");
    feed.className = "error";
    feed.innerText = "Error: " + error;
}

// Handles errors during the XMLHttpRequest.
function handleError() {
    handleFeedParsingFailed('Failed to fetch RSS feed.');
}

// Handles parsing the feed data we got back from XMLHttpRequest.
function handleResponse() {
    var doc = req.responseXML;
    if (!doc) {
        handleFeedParsingFailed("Not a valid feed.");
        return;
    }
    buildPreview(doc);
}

// The maximum number of feed items to show in the preview.
var maxFeedItems = 10;

// Where the more stories link should navigate to.
var moreStoriesUrl;
function buildPreview(doc) {
    // Get the link to the feed source.
    var link = doc.getElementsByTagName("link");
    var parentTag = link[0].parentNode.tagName;
    if (parentTag != "item" && parentTag != "entry") {
        moreStoriesUrl = link[0].textContent;
    }

    // Setup the title image.
    var images = doc.getElementsByTagName("image");
    var titleImg;
    if (images.length != 0) {
        var urls = images[0].getElementsByTagName("url");
        if (urls.length != 0) {
            titleImg = urls[0].textContent;
        }
    }
    var img = document.getElementById("title");

    // Listen for mouse and key events
    if (titleImg) {
        img.src = titleImg;
        if (moreStoriesUrl) {
            document.getElementById("title_a").addEventListener("click", moreStories);
            document.getElementById("title_a").addEventListener("keydown",
                function(event) {
                    if (event.keyCode == 13) {
                        moreStories(event);
                    }
                });
        }
    } else {
        img.style.display = "none";
    }

    // Construct the iframe's HTML.
    var iframe_src = "<!doctype html><html><head><script>" +
        document.getElementById("iframe_script").textContent + "<" +
        "/script></head><body onload='frameLoaded();' " +
        "style='padding:0px;margin:0px;'>";

    var feed = document.getElementById("feed");
    // Set ARIA role indicating the feed element has a tree structure
    feed.setAttribute("role", "tree");

    var entries = doc.getElementsByTagName('item');
    var count = Math.min(entries.length, maxFeedItems);
    for (var i = 0; i < count; i++) {
        item = entries.item(i);
        // Grab the title for the feed item.
        var itemTitle = item.getElementsByTagName('title')[0];
        if (itemTitle) {
            itemTitle = itemTitle.textContent;
        } else {
            itemTitle = "Unknown title";
        }
        // Grab the link for the feed item.
        var itemLink = item.getElementsByTagName('link')[0];
        if (itemLink) {
            itemLink = itemLink.textContent;
        } else {
            itemLink = "Unknown link";
        }
        var item = document.createElement("div");
        var title = document.createElement("a");
        title.innerText = itemTitle; // display title in iframe
        title.href = itemLink;
        title.addEventListener("click", titleLink);
        item.appendChild(title);
        feed.appendChild(item);
    }

    if (moreStoriesUrl) {
        var more = document.createElement("a");
        more.className = "more";
        more.innerText = "***Site Main Page*** \u00BB";
        more.tabIndex = 0;
        more.addEventListener("click", moreStories);
        more.addEventListener("keydown", function(event) {
            if (event.keyCode == 13) {
                moreStories(event);
            }
        });
        feed.appendChild(more);
    }
}
// -------------------------------------------------------------------
// Show |url| in a new tab.
function showUrl(url) {
    // Only allow http and https URLs.
    if (url.indexOf("http:") != 0 && url.indexOf("https:") != 0) {
        return;
    }
    chrome.tabs.create({url: url});
}

// -------------------------------------------------------------------
function moreStories(event) {
    showUrl(moreStoriesUrl);
}

function titleLink(event) {
    showUrl(event.currentTarget.href);
}
function keyHandlerShowDesc(event) {
    // Display content under heading when spacebar or right-arrow pressed
    // Hide content when spacebar pressed again or left-arrow pressed
    // Move to next heading when down-arrow pressed
    // Move to previous heading when up-arrow pressed
    if (event.keyCode == 32) {
        showDesc(event);
    } else if ((this.parentNode.className == "item opened") &&
               (event.keyCode == 37)) {
        showDesc(event);
    } else if ((this.parentNode.className == "item") && (event.keyCode == 39)) {
        showDesc(event);
    } else if (event.keyCode == 40) {
        if (this.parentNode.nextSibling) {
            this.parentNode.nextSibling.children[1].focus();
        }
    } else if (event.keyCode == 38) {
        if (this.parentNode.previousSibling) {
            this.parentNode.previousSibling.children[1].focus();
        }
    }
}

function showDesc(event) {
    var item = event.currentTarget.parentNode;
    var items = document.getElementsByClassName("item");
    for (var i = 0; i < items.length; i++) {
        var iframe = items[i].getElementsByClassName("item_desc")[0];
        if (items[i] == item && items[i].className == "item") {
            items[i].className = "item opened";
            iframe.contentWindow.postMessage("reportHeight", "*");
            // Set the ARIA state indicating the tree item is currently expanded.
            items[i].getElementsByClassName("item_title")[0].
                setAttribute("aria-expanded", "true");
            iframe.tabIndex = 0;
        } else {
            items[i].className = "item";
            iframe.style.height = "0px";
            // Set the ARIA state indicating the tree item is currently collapsed.
            items[i].getElementsByClassName("item_title")[0].
                setAttribute("aria-expanded", "false");
            iframe.tabIndex = -1;
        }
    }
}
function iframeMessageHandler(e) {
    // Only listen to messages from one of our own iframes.
    var iframes = document.getElementsByTagName("IFRAME");
    for (var i = 0; i < iframes.length; i++) {
        if (iframes[i].contentWindow == e.source) {
            var msg = JSON.parse(e.data);
            if (msg) {
                if (msg.type == "size") {
                    iframes[i].style.height = msg.size + "px";
                } else if (msg.type == "show") {
                    var url = msg.url;
                    if (url.indexOf("http://localhost/index.html") == 0) {
                        // If the URL is a redirect URL, strip off the destination and go
                        // to that directly. This is necessary because the Google news
                        // redirector blocks use of the redirects in this case.
                        var index = url.indexOf("&url=");
                        if (index >= 0) {
                            url = url.substring(index + 5);
                            index = url.indexOf("&");
                            if (index >= 0)
                                url = url.substring(0, index);
                        }
                    }
                    showUrl(url);
                }
            }
            return;
        }
    }
}

window.addEventListener("message", iframeMessageHandler);
</script>
Thanks again for the help.
-Mike
