I am implementing a targeted search using Redis.
Below is the function for indexing ads:
def index_ad(conn, id, locations, content, type, value):
    pipeline = conn.pipeline(True)
    for location in locations:
        pipeline.sadd('idx:req:' + location, id)
    words = tokenize(content)
    for word in words:
        pipeline.zadd('idx:' + word, id, 0)
    rvalue = TO_ECPM[type](1000, AVERAGE_PER_1K.get(type, 1), value)
    pipeline.hset('type:', id, type)
    pipeline.zadd('idx:ad:value:', id, rvalue)
    pipeline.zadd('ad:base_value:', id, value)
    pipeline.sadd('terms:' + id, *list(words))
    pipeline.execute()
I get the following error:
Command # 3 (ZADD idx:look 0 1) of pipeline caused error: WRONGTYPE Operation against a key holding the wrong kind of value
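For what it's worth, WRONGTYPE means the key idx:look already exists but holds something other than a sorted set, so ZADD cannot write to it. A minimal check (a sketch, assuming conn is the same redis-py connection used above):

# A sketch: inspect the type of the conflicting key before indexing.
# If an earlier indexer wrote 'idx:look' as a plain set (via SADD), ZADD
# on that same key fails with WRONGTYPE.
print(conn.type('idx:look'))  # e.g. b'set' vs. b'zset'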
I am trying to create a virtual table in HANA based on a remote system table view.
If I run it at the command line using hdbsql
hdbsql H00=> create virtual table HanaIndexTable at "SYSRDL#CG_SOURCE"."<NULL>"."dbo"."sysiqvindex"
0 rows affected (overall time 305.661 msec; server time 215.870 msec)
I am able to select from HanaIndexTable and get results and see my index.
When I code it in python, I use the following command:
cursor.execute("""create virtual table HanaIndexTable1 at SYSRDL#CG_source.\<NULL\>.dbo.sysiqvindex""")
I think there is a problem with the NULL. But I see in the output that the escape character (backslash) is doubled.
self = <hdbcli.dbapi.Cursor object at 0x7f02d61f43d0>
operation = 'create virtual table HanaIndexTable1 at SYSRDL#CG_source.\\<NULL\\>.dbo.sysiqvindex'
parameters = None
def __execute(self, operation, parameters = None):
# parameters is already checked as None or Tuple type.
> ret = self.__cursor.execute(operation, parameters=parameters, scrollable=self._scrollable)
E hdbcli.dbapi.ProgrammingError: (257, 'sql syntax error: incorrect syntax near "\\": line 1 col 58 (at pos 58)')
/usr/local/lib/python3.7/site-packages/hdbcli/dbapi.py:69: ProgrammingError
I have tried to run the command without the <> but get the following error.
hdbcli.dbapi.ProgrammingError: (257, 'sql syntax error: incorrect syntax near "NULL": line 1 col 58 (at pos 58)')
I have tried upper case, lower case and escaping. Is what I am trying to do impossible?
There was an issue with capitalization between HANA and my remote source. I also needed more escaping rather than less.
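For reference, a sketch of what the working Python call might look like, mirroring the double-quoted identifiers from the hdbsql command above (the exact source name and casing depend on the remote source configuration):

cursor.execute(
    'create virtual table HanaIndexTable1 '
    'at "SYSRDL#CG_SOURCE"."<NULL>"."dbo"."sysiqvindex"'
)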
We are building a search for our Moodle site with the Algolia search product.
To update the index on the Algolia side we want to use an automated process (uploading the index data).
I decided to start with the Python client (Python v3.9.x). Since we are working inside a corporate network, we are behind a firewall and there is a problem accessing Algolia's servers.
I'm testing with two methods:
Search within already existing indices: index.search
Update all objects in an index: index.replace_all_objects
Error message: 'Unreachable hosts'
Here are my code snippets:
# Search:
from algoliasearch.search_client import SearchClient
from algoliasearch.configs import SearchConfig  # imports as used by the v2+ Python client
import constants

searchAppID = constants.ALGOLIA_APP_ID
searchKeyHash = constants.ALGOLIA_SEARCH_KEY

config = SearchConfig(searchAppID, searchKeyHash)
config.connect_timeout = 2
config.read_timeout = 5
config.write_timeout = 30

client = SearchClient.create_with_config(config)
index = client.init_index('myIndex')
result = index.search('SearchString')
# Function to update the index using a json file:
import json

def replace_IdxObj(client, index, fileName):
    try:
        if index.exists():
            with open(fileName, encoding="utf8") as f:
                data = json.load(f)
                result = index.replace_all_objects(data, {'autoGenerateObjectIDIfNotExist': True})
                return result
        else:
            print('No Index found! Cancel operation... \n')
            return None
    except Exception as err:
        print("Error: " + str(err))
When I test my code outside of the company's network, everything works fine.
I also have an SSL certificate that can be used to work with external resources, so I used it in the following test (run from the company's network, behind the firewall):
response = requests.get('https://google.com/', verify=("C:\\Program Files\\Common Files\\SSL\\certs\\cert.cer"))
print(response)
This test works just fine (I'm getting 200 response)!
Please advise: is there any way to make the Python process used by Algolia's Python client use the certificate that I have? Any alternatives?
Many thanks!
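One avenue that may be worth testing, assuming the Algolia Python client performs its HTTP calls through the requests library (an assumption about the client internals, not something confirmed here), is to point the standard requests CA-bundle environment variable at the corporate certificate before the client is created:

import os

# Assumption: the Algolia client's transport uses requests, which honours the
# REQUESTS_CA_BUNDLE environment variable for TLS verification.
os.environ["REQUESTS_CA_BUNDLE"] = r"C:\Program Files\Common Files\SSL\certs\cert.cer"

client = SearchClient.create_with_config(config)
index = client.init_index('myIndex')
result = index.search('SearchString')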
I am following the documentation to acquire a lock on a variable but it fails:
a = 0
lock(a) do
    a += 1
end
Error message:
ERROR: MethodError: no method matching lock(::var"#3#4", ::Int64)
Closest candidates are:
lock(::Any, ::Base.GenericCondition) at condition.jl:78
lock(::Any, ::Base.AbstractLock) at lock.jl:158
lock(::Any, ::WeakKeyDict) at weakkeydict.jl:76
Stacktrace:
[1] top-level scope at REPL[3]:1
The error message is clear, but why would the code provided in the documentation fail? Moreover, I am not sure where to look for detailed documentation about the lock function.
OK, I figured it out:
a = 0
l = ReentrantLock()
lock(l) do
    global a  # needed if using the REPL
    a += 1
end
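For reference, the do-block form is just sugar for acquiring and releasing the lock around the body (the docstring is available via ?lock in the REPL); the explicit equivalent looks like this:

l = ReentrantLock()
lock(l)          # block until the lock is acquired
try
    global a
    a += 1
finally
    unlock(l)    # always release, even if the body throws
end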
When I try to retrieve my table in Informix with the ifxpy package, I get this error:
---------------------------------------------------------------------------
Exception Traceback (most recent call last)
<ipython-input-6-b0557e7f099b> in <module>
16 while dictionary != False:
17 tph.append(dictionary)
---> 18 dictionary = IfxPy.fetch_assoc(stmt)
19 print(pd.DataFrame(tph))
Exception: [Informix][Informix ODBC Driver]Invalid string or buffer length. SQLCODE=-11071
This is my code:
import IfxPy
import pandas as pd

ConStr = "SERVER=informix1;DATABASE=ir_fmois;HOST=127.0.0.1;SERVICE=9092;UID=informix;PWD=1234;"

try:
    # netstat -a | findstr 9088
    conn = IfxPy.connect(ConStr, "", "")
except Exception as e:
    print('ERROR: Connect failed')
    print(e)
    quit()

sql = "SELECT * FROM oih"
stmt = IfxPy.exec_immediate(conn, sql)

dictionary = IfxPy.fetch_assoc(stmt)
tph = []
while dictionary != False:
    tph.append(dictionary)
    dictionary = IfxPy.fetch_assoc(stmt)

print(pd.DataFrame(tph))
When I print the dataframe I see that I got only the first 4 rows.
I also tried this code; it doesn't throw any exception, but it likewise returns only the first 4 rows of my table:
import IfxPyDbi as dbapi2

ConStr = "SERVER=informix1;DATABASE=ir_fmois;HOST=127.0.0.1;SERVICE=9092;UID=informix;PWD=1234;"
conn = dbapi2.connect(ConStr, "", "")
cur = conn.cursor()
sql = "SELECT * FROM oih"
cur.execute(sql)
rows = cur.fetchall()
len(rows)
>>> 4
EDIT
I tried selecting the columns one by one, and the same error occurred when I tried to select a byte (blob) column (with text data type). This column has empty values in the first 4 rows, but the fifth wasn't empty, and I think this is why the error occurred on row 5.
I would really appreciate it if anyone has an idea of how to solve this.
The finderr command reports:
$ finderr -11071
-11071 Invalid string or buffer length.
This Informix CLI error code is the same as SQLSTATE value S1090. The
following functions can return this error code: SQLBindCol(),
SQLBindParameter(), SQLBrowseConnect(), SQLColAttributes(),
SQLColumnPrivileges(), SQLColumns(), SQLConnect(), SQLDataSources(),
SQLDescribeCol(), SQLDriverConnect(), SQLDrivers(), SQLExecDirect(),
SQLExecute(), SQLForeignKeys(), SQLGetCursorName(), SQLGetData(),
SQLGetInfo(), SQLNativeSql(), SQLPrepare(), SQLPrimaryKeys(),
SQLProcedureColumns(), SQLProcedures(), SQLPutData(), SQLSetCursorName(),
SQLSpecialColumns(), SQLStatistics(), SQLTablePrivileges(), and SQLTables().
The value specified for the argument cbValueMax is less than zero.
Supply a value for the argument cbValueMax that is zero or greater.
For all functions, an argument that specified a string or buffer length, such
as cbCursor, cbConnStrIn, or cbSqlStr, had one or more of the following
problems: (1) It was less than 0, (2) It was less than 0 but not equal to
SQL_NTS or SQL_NULL_DATA, (3) It was less than 0 but the corresponding
pointer was not a null pointer, (4) It was equal to 1, or (5) It was too
large. Set the string or buffer length to a valid value.
Additionally for SQLExecDirect() and SQLExecute(), a parameter value that was
set with SQLBindParameter() had one of the following problems: (1) It was a
null pointer, and the parameter length was not 0, SQL_NULL_DATA,
SQL_DATA_AT_EXEC, or less than or equal to SQL_LEN_DATA_AT_EXEC_OFFSET, or
(2) It was not a null pointer, and the parameter length was less than 0 but
was not SQL_NTS, SQL_NULL_DATA, SQL_DATA_AT_EXEC, or less than or equal to
SQL_LEN_EXEC_DATA_AT_EXEC_OFFSET. Set the parameter value to a valid value.
$
Since your Python code is not calling any of those functions directly, that suggests there is a bug in the ifxpy driver. It is probably calling one of the listed functions with an incorrect argument.
As such, you should probably report it as an 'issue' on the GitHub site for the driver: https://github.com/OpenInformix/IfxPy
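In the meantime, a possible workaround is to leave the TEXT/BYTE column out of the query so the driver never has to bind it (a sketch; col1, col2, col3 are placeholders for the actual non-blob column names in oih):

# Hypothetical workaround: select only the non-blob columns so fetch_assoc()
# never touches the TEXT/BYTE column that triggers SQLCODE -11071.
sql = "SELECT col1, col2, col3 FROM oih"
stmt = IfxPy.exec_immediate(conn, sql)

tph = []
row = IfxPy.fetch_assoc(stmt)
while row != False:
    tph.append(row)
    row = IfxPy.fetch_assoc(stmt)
print(pd.DataFrame(tph))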
I'm using Python 3 to write a script that generates a customer report for SolarWinds N-Central. The script uses SOAP to query N-Central, and I'm using zeep for this project. While not new to Python, I am new to SOAP.
When calling the CustomerList function I get: TypeError: __init__() got an unexpected keyword argument 'listSOs'
import zeep
wsdl = 'http://' + <server url> + '/dms/services/ServerEI?wsdl'
client = zeep.CachingClient(wsdl=wsdl)
config = {'listSOs': 'true'}
customers = client.service.CustomerList(Username=nc_user, Password=nc_pass, Settings=config)
Per the parameters below, 'listSOs' is not only a valid keyword, it's the only one accepted.
CustomerList
public com.nable.nobj.ei.Customer[] CustomerList(String username, String password, com.nable.nobj.ei.T_KeyPair[] settings) throws RemoteException
Parameters:
username - MSP N-central username
password - Corresponding MSP N-central password
settings - A list of non default settings stored in a T_KeyPair[]. Below is a list of the acceptable Keys and Values. If not used leave null
(Key) listSOs - (Value) "true" or "false". If true only SOs will be shown, if false only customers and sites will be shown. Default value is false.
I've also tried passing the dictionary as part of a list:
config = []
key = {'listSOs': 'true'}
config += key
TypeError: Any element received object of type 'str', expected lxml.etree._Element or builtins.dict or zeep.objects.T_KeyPair
Omitting the Settings value entirely:
customers = client.service.CustomerList(Username=nc_user, Password=nc_pass)
zeep.exceptions.ValidationError: Missing element Settings (CustomerList.Settings)
And trying zeep's SkipValue:
customers = client.service.CustomerList(Username=nc_user, Password=nc_pass, Settings=zeep.xsd.SkipValue)
zeep.exceptions.Fault: java.lang.NullPointerException
I'm probably missing something simple, but I've been banging my head against the wall off and on over this for a while. I'm hoping someone can point me in the right direction.
Here's the source code from my getAssets.py script. I did it in Python 2.7, but it's easily upgradeable. Hope it helps someone else; N-central's API documentation is really bad.
# pip2.7 install zeep
import zeep, sys, csv, copy
from zeep import helpers

api_username = 'your_ncentral_api_user'
api_password = 'your_ncentral_api_user_pw'
wsdl = 'https://(yourdomain|tenant)/dms2/services2/ServerEI2?wsdl'
client = zeep.CachingClient(wsdl=wsdl)

response = client.service.deviceList(
    username=api_username,
    password=api_password,
    settings={
        'key': 'customerId',
        'value': 1
    }
)

# If you can't tell yet, I code sloppy
devices_list = []
device_dict = {}
dev_inc = 0
max_dict_keys = 0
final_keys = []

for device in response:
    # Iterate through all device nodes
    for device_properties in device.items:
        # Iterate through each device's properties and add it to a dict (keyed array)
        device_dict[device_properties.first] = device_properties.second

    # Dig further into device properties
    device_properties = client.service.devicePropertyList(
        username=api_username,
        password=api_password,
        deviceIDs=device_dict['device.deviceid'],
        reverseOrder=False
    )

    prop_ind = 0  # This is a hacky thing I did to make my CSV writing work
    for device_node in device_properties:
        for prop_tree in device_node.properties:
            for key, value in helpers.serialize_object(prop_tree).items():
                prop_ind += 1
                device_dict["prop" + str(prop_ind) + "_" + str(key)] = str(value)

    # Append the dict to a list (array), giving us a multi-dimensional array;
    # a deep copy is needed, as .copy() would act like a pointer
    devices_list.append(copy.deepcopy(device_dict))

    # Check the number of keys in the last item
    if len(devices_list[-1].keys()) > max_dict_keys:
        max_dict_keys = len(devices_list[-1].keys())
        final_keys = devices_list[-1].keys()

print "Gathered all the datas of N-central devices count: ", len(devices_list)

# Write the data out to a CSV
with open('output.csv', 'w') as csvfile:
    fieldnames = final_keys
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    for csv_line in devices_list:
        writer.writerow(csv_line)
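Applying the same settings shape back to the original CustomerList call would presumably look something like this (an untested sketch against the question's ServerEI endpoint; the key point is that the T_KeyPair is expressed as a {'key': ..., 'value': ...} dict rather than {'listSOs': 'true'}):

# Hypothetical adaptation of the pattern above for the original question:
customers = client.service.CustomerList(
    Username=nc_user,
    Password=nc_pass,
    Settings={'key': 'listSOs', 'value': 'true'}
)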