Sphinx indexer is not working - sql-server-2008

I want to index an MS SQL Server 2008 database on Windows 7.
This is my config file:
source main
{
type = mssql
sql_host = localhost
sql_user = sa
sql_pass = pass
#mssql_winauth = 1
sql_db = article
sql_port = 1433 # optional, default is 3306
mssql_unicode = 1 # request Unicode data from server
sql_query_pre = insert into IndexLog(StartDateTime, Count, Successful) select top 1 getdate(), (select COUNT(id) from article),0 from article
sql_query = SELECT ID ,ROW_NUMBER() OVER(ORDER BY Title) AS TitleRowNo\
,Title,abstract \
FROM article
#Log index end
sql_query_post_index = update IndexLog set EndDateTime = getdate(), successful = 1 where id = (select max(id) from IndexLog)
sql_attr_uint = ID
sql_attr_uint = TitleRowNo
}
index news_main
{
source = main
path = C:/sphinx/article/data/news_main
docinfo = extern
charset_type = utf-8
min_word_len = 3
min_infix_len = 3
infix_fields = Title, abstract
# min_stemming_len = 0
# index_exact_words = 1
}
indexer
{
mem_limit = 32M
}
searchd
{
port = 6550 # 9312
log = C:/sphinx/article/log/searchd.log
query_log = C:/sphinx/article/log/query.log
read_timeout = 5
max_children = 0 # concurrent searches to run in parallel
pid_file = C:/sphinx/article/log/searchd.pid
max_matches = 1000
seamless_rotate = 1
preopen_indexes = 0
unlink_old = 1
collation_server = utf8_general_ci
collation_libc_locale = utf8_general_ci
}
I use this command to run the indexer:
indexer --config "C:\sphinx\a.sphinx.conf" news_main
but it does not work.
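Typically indexer prints a specific error (connection failure, bad query, missing index), so its console output is the first thing to check. Independently of Sphinx, it can also help to verify the credentials and query from the source block in isolation. Here is a minimal connectivity sketch in Python using the pyodbc package; the package, the driver name, and the simplified query are my assumptions, not part of the original setup:

import pyodbc

# Same credentials as the sphinx.conf source block above.
conn = pyodbc.connect(
    'DRIVER={SQL Server};SERVER=localhost,1433;'
    'DATABASE=article;UID=sa;PWD=pass'
)
cursor = conn.cursor()

# A simplified form of the document query the indexer would run.
cursor.execute('SELECT ID, Title, abstract FROM article')
print(cursor.fetchone())
conn.close()

If this fails, the problem is the login or connection rather than the Sphinx config; if it succeeds, the next things to check are whether the installed Sphinx build was compiled with MSSQL support (the mssql source type is Windows-specific) and the log files under C:/sphinx/article/log/.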

Related

SSRS Download All History Snapshots

Is it possible to download all the history snapshots of a report at once? Preferably as a CSV. It would save a lot of time compared with clicking into each one individually and selecting save as CSV.
I only see the option to Delete.
In PowerShell, you can loop through each snapshot and save them using this example:
<#
Description: Save SSRS Report Snapshots
#>
$sql = "
DECLARE @ReportName NVARCHAR(200) = 'Your Report Name'; --change to NULL for every snapshot
DECLARE @FileFormat NVARCHAR(50) = 'CSV'; --HTML5,PPTX,ATOM,HTML4.0,MHTML,IMAGE,EXCEL (for .xls),EXCELOPENXML (for .xlsx),WORD (for .doc),WORDOPENXML (for .docx),CSV,PDF,XML
DECLARE @FileExtn NVARCHAR(50) = 'csv';
DECLARE @ServerName NVARCHAR(100) = 'http://YourServerName';
DECLARE @DateFrom DATE = CAST(DATEADD(DAY, -1, GETDATE()) AS DATE); --change to NULL for every snapshot
DECLARE @ExportPath NVARCHAR(200) = 'C:\Temp\';
SELECT
--[ReportID] = [c].[itemid]
-- , [ReportName] = [c].[name]
-- , [ReportPath] = [c].[path]
-- , [SnapshotDate] = FORMAT([h].[snapshotdate], 'dd-MMM-yyyy')
-- , [SnapshotDescription] = [s].[DESCRIPTION]
-- , [SnapshotEffectiveParams] = [s].[effectiveparams]
-- , [SnapshotQueryParams] = [s].[queryparams]
-- , [ScheduleName] = [sc].[name]
-- , [ScheduleNextRunTime] = CONVERT(VARCHAR(20), [sc].[nextruntime], 113)
[ExportFileName] = @ExportPath + REPLACE([c].[name], ' ', '_') + '_' + FORMAT([h].[snapshotdate], 'yyyyMMdd_HHmm') + '.' + @FileExtn
, [SnapshotUrl] =
@ServerName
+ '/ReportServer/Pages/ReportViewer.aspx?'
+ [c].[path] + '&rs:Command=Render&rs:Format='
+ @FileFormat + '&rs:Snapshot='
+ FORMAT([h].[snapshotdate], 'yyyy-MM-ddTHH:mm:ss')
FROM
[ReportServer].[dbo].[History] AS [h] WITH(NOLOCK)
INNER JOIN [ReportServer].[dbo].[SnapshotData] AS [s] WITH(NOLOCK) ON [h].[snapshotdataid] = [s].[snapshotdataid]
INNER JOIN [ReportServer].[dbo].[Catalog] AS [c] WITH(NOLOCK) ON [c].[itemid] = [h].[reportid]
INNER JOIN [ReportServer].[dbo].[ReportSchedule] AS [rs] WITH(NOLOCK) ON [rs].[reportid] = [h].[reportid]
INNER JOIN [ReportServer].[dbo].[Schedule] AS [sc] WITH(NOLOCK) ON [sc].[scheduleid] = [rs].[scheduleid]
WHERE
1=1
AND [rs].[reportaction] = 2
AND (@ReportName IS NULL OR [c].[Name] = @ReportName)
AND (@DateFrom IS NULL OR [h].[snapshotdate] >= @DateFrom)
ORDER BY
[c].[name]
, [h].[snapshotdate];
;"
$server = 'YourServerName';
$dbs = 'MASTER';
$dsn = "Data Source=$server; Initial Catalog=$dbs; Integrated Security=SSPI;";
$cn = New-Object System.Data.SqlClient.SqlConnection($dsn);
$cn.Open();
$cmd = $cn.CreateCommand();
$cmd.CommandText = $sql
$SqlAdapter = New-Object System.Data.SqlClient.SqlDataAdapter
$SqlAdapter.SelectCommand = $cmd
$cmd.Connection = $cn
$ds = New-Object System.Data.DataSet
$SqlAdapter.Fill($ds)
$cn.Close()
$Result = $ds.Tables[0]
Foreach ($item in $Result)
{
#Write-Host $item.name
$SnapshotUrl = $item.SnapshotUrl
$ExportFileName = $item.ExportFileName
(Invoke-WebRequest -Uri $SnapshotUrl -OutFile $ExportFileName -UseDefaultCredentials -TimeoutSec 240);
}
https://learn.microsoft.com/en-us/sql/reporting-services/url-access-parameter-reference?view=sql-server-ver15
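A note on permissions: the script reads snapshot metadata directly from the ReportServer catalog tables (via Integrated Security=SSPI) and then renders each snapshot through the URL-access endpoint (via -UseDefaultCredentials), so the Windows account running it needs read access to the ReportServer database as well as browse rights on the reports themselves.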
I was having trouble with PowerShell, so I thought I'd post a simplified version of my rough Python solution, inspired by the resource from @aduguid's answer.
import requests
from requests_negotiate_sspi import HttpNegotiateAuth
import os
def downloadFile(url, file_name, download_folder, session):
    response = session.get(url, stream=True)  # open the download link
    file_path = os.path.join(download_folder, file_name)
    with open(file_path, 'wb') as file:  # create a new file with write binary mode
        for chunk in response.iter_content(chunk_size=1024):
            if chunk:
                file.write(chunk)
# Can also use '/Reports()' for non-linked reports.
# Can also pass in 'path="<report_path>"' instead of using id numbers,
# e.g. '.../Reports(path="/cool%20reports/my%20report")/HistorySnapshots'
api_url = r'http://<server_name>/reports/api/v2.0/LinkedReports(<item_id>)/HistorySnapshots'
session = requests.session()
session.auth = HttpNegotiateAuth() # uses windows log in
response = session.get(api_url)
hs_snapshot_list = response.json()['value']
for item_dict in hs_snapshot_list:
    download_url = (r'http://<server_name>/ReportServer/Pages/ReportViewer.aspx?<report_path>'
                    + '&rs:Snapshot=' + item_dict['HistoryId']
                    + '&rs:Format=CSV')
    downloadFile(download_url, '<your_file_name>', '<your_download_folder>', session)
SSRS API Resource:
https://app.swaggerhub.com/apis/microsoft-rs/SSRS/2.0#/Reports/GetReportHistorySnapshots
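One caveat with the sketch above: the file name passed to downloadFile is a fixed placeholder, so each loop iteration would overwrite the previous download. Deriving the name from something unique per snapshot, such as item_dict['HistoryId'], avoids the collision.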

Replicate one db into another on same server and same instance in MYSQL

I just want to replicate a database into another on the same machine and in the same MySQL instance.
Is it possible?
I have tried the following, but it is not working at all:
server-id = 1
report_host = master-is-slave-host
log_bin = /var/log/mysqld/mysql-bin.log
relay_log = /var/log/mysqld/relay-bin
replicate-same-server-id = 1
binlog_do_db = replication_1
replicate_rewrite_db = replication_1->replication_2
replicate_do_table = replication_1.table1
replicate_do_table = replication_2.table2
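For what it's worth, pointing a mysqld instance at its own binlog with replicate-same-server-id plus a rewrite rule is a known hack rather than a supported configuration, so it failing is not surprising. If the underlying goal is a one-off or periodic copy between two schemas on the same instance, a plain cross-database INSERT ... SELECT needs no replication machinery at all. A minimal sketch in Python, assuming the mysql-connector-python package, placeholder credentials, and identical table definitions in both schemas:

import mysql.connector  # assumption: mysql-connector-python is installed

# Placeholder credentials; both schemas live in the same instance.
conn = mysql.connector.connect(user='root', password='secret', host='localhost')
cur = conn.cursor()

# Copy one table across schemas within the single instance.
cur.execute('INSERT INTO replication_2.table1 SELECT * FROM replication_1.table1')
conn.commit()

cur.close()
conn.close()

For continuous change capture within one instance, triggers on the source tables are the usual alternative to replication.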

Sphinx or Thinking Sphinx not returning entries I know are present

I'm trying to get Sphinx 2.1.7 working with Rails 3.2.17 app running on Ruby 1.9.3p448 on Mac OS 10.8.5. I'm using MySQL 5.1.71. I'm using the Thinking Sphinx gem 3.1.1.
I have a model called Entry and I'd like to search on columns entries.title and entries.entry (Yeah, the column containing the "content" of the Entry is called entry.)
I followed the Sphinx and Thinking Sphinx docs as closely as I could.
Here's my setup...
Index definition...
entry_index.rb
ThinkingSphinx::Index.define(
  :entry, :with => :active_record, :delta => true
) do
  indexes(title)
  indexes(entry)
  has user_id
  has created_at
  has updated_at
end
The entries table has a column...
`delta` tinyint(1) NOT NULL DEFAULT '1'
Thinking Sphinx config...
thinking_sphinx.yml
defaults: &defaults
bin_path: '/usr/local/bin'
morphology: stem_en
enable_star: 1
min_word_len: 3
min_prefix_len: 2
min_infix_len: 2
expand_keywords: 1
index_exact_words: 1
mem_limit: 512M
development:
<<: *defaults
production:
<<: *defaults
What's going wrong...
What's going wrong is that I'll create a new Entry, for example...
Title: This is not the only Entry where I mention persimmons
Entry: I like persimmons.
Then I'll run a search and a few previous entries (from months ago) come up that contain "persimmon," but not the new one I just created.
I then shut down my app and run...
bundle exec rake ts:rebuild
And try again.
The new "persimmon" entry still doesn't come up.
Can someone tell me what I'm missing?
Any help would be greatly appreciated.
Sphinx not reporting that it's rebuilding the index
When I rebuild, I get the output below. I'm used to seeing something like "rebuilding indexes," but that isn't happening. I'm thinking this might have something to do with it?
$ bundle exec rake ts:rebuild
Stopped searchd daemon (pid: 1976).
Generating configuration to /Users/username/Dropbox/project-dropbox/journal_application/config/development.sphinx.conf
Started searchd successfully (pid: 2007).
Sphinx config file
Here is the development.sphinx.conf generated by thinking Sphinx...
indexer
{
mem_limit = 512M
}
searchd
{
listen = 127.0.0.1:9306:mysql41
log = /Users/username/Dropbox/project-dropbox/journal_application/log/development.searchd.log
query_log = /Users/username/Dropbox/project-dropbox/journal_application/log/development.searchd.query.log
pid_file = /Users/username/Dropbox/project-dropbox/journal_application/log/development.sphinx.pid
workers = threads
binlog_path = /Users/username/Dropbox/project-dropbox/journal_application/tmp/binlog/development
}
source entry_core_0
{
type = mysql
sql_host = localhost
sql_user = root
sql_pass =
sql_db = journal_app_development
sql_query_pre = SET TIME_ZONE = '+0:00'
sql_query_pre = SET NAMES utf8
sql_query = SELECT SQL_NO_CACHE `entries`.`id` * 3 + 0 AS `id`, `entries`.`title` AS `title`, `entries`.`entry` AS `entry`, `entries`.`id` AS `sphinx_internal_id`, 'Entry' AS `sphinx_internal_class`, 0 AS `sphinx_deleted`, `entries`.`user_id` AS `user_id`, UNIX_TIMESTAMP(`entries`.`created_at`) AS `created_at`, UNIX_TIMESTAMP(`entries`.`updated_at`) AS `updated_at` FROM `entries` WHERE (`entries`.`id` BETWEEN $start AND $end) GROUP BY `entries`.`id`, `entries`.`title`, `entries`.`entry`, `entries`.`id`, `entries`.`user_id`, `entries`.`created_at`, `entries`.`updated_at` ORDER BY NULL
sql_query_range = SELECT IFNULL(MIN(`entries`.`id`), 1), IFNULL(MAX(`entries`.`id`), 1) FROM `entries`
sql_attr_uint = sphinx_internal_id
sql_attr_uint = sphinx_deleted
sql_attr_uint = user_id
sql_attr_timestamp = created_at
sql_attr_timestamp = updated_at
sql_attr_string = sphinx_internal_class
sql_query_post_index = UPDATE `entries` SET `delta` = 0 WHERE `delta` = 1
sql_query_info = SELECT `entries`.* FROM `entries` WHERE (`entries`.`id` = ($id - 0) / 3)
}
index entry_core
{
type = plain
path = /Users/username/Dropbox/project-dropbox/journal_application/db/sphinx/development/entry_core
docinfo = extern
morphology = stem_en
min_word_len = 3
charset_type = utf-8
min_prefix_len = 2
min_infix_len = 2
enable_star = 1
expand_keywords = 1
index_exact_words = 1
source = entry_core_0
}
source entry_delta_0
{
type = mysql
sql_host = localhost
sql_user = root
sql_pass =
sql_db = journal_app_development
sql_query_pre = SET TIME_ZONE = '+0:00'
sql_query_pre = SET NAMES utf8
sql_query = SELECT SQL_NO_CACHE `entries`.`id` * 3 + 0 AS `id`, `entries`.`title` AS `title`, `entries`.`entry` AS `entry`, `entries`.`id` AS `sphinx_internal_id`, 'Entry' AS `sphinx_internal_class`, 0 AS `sphinx_deleted`, `entries`.`user_id` AS `user_id`, UNIX_TIMESTAMP(`entries`.`created_at`) AS `created_at`, UNIX_TIMESTAMP(`entries`.`updated_at`) AS `updated_at` FROM `entries` WHERE (`entries`.`delta` = 1 AND `entries`.`id` BETWEEN $start AND $end) GROUP BY `entries`.`id`, `entries`.`title`, `entries`.`entry`, `entries`.`id`, `entries`.`user_id`, `entries`.`created_at`, `entries`.`updated_at` ORDER BY NULL
sql_query_range = SELECT IFNULL(MIN(`entries`.`id`), 1), IFNULL(MAX(`entries`.`id`), 1) FROM `entries` WHERE (`entries`.`delta` = 1)
sql_attr_uint = sphinx_internal_id
sql_attr_uint = sphinx_deleted
sql_attr_uint = user_id
sql_attr_timestamp = created_at
sql_attr_timestamp = updated_at
sql_attr_string = sphinx_internal_class
sql_query_info = SELECT `entries`.* FROM `entries` WHERE (`entries`.`id` = ($id - 0) / 3)
}
index entry_delta
{
type = plain
path = /Users/username/Dropbox/project-dropbox/journal_application/db/sphinx/development/entry_delta
docinfo = extern
morphology = stem_en
min_word_len = 3
charset_type = utf-8
min_prefix_len = 2
min_infix_len = 2
enable_star = 1
expand_keywords = 1
index_exact_words = 1
source = entry_delta_0
}
index entry
{
type = distributed
local = entry_core
local = entry_delta
}
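One thing that stands out in the ts:rebuild output quoted above: there is no indexing step at all between "Generating configuration" and "Started searchd" (normally indexer prints lines like "indexing index 'entry_core'..."), which suggests the documents are never actually being re-indexed. It is also worth confirming that the delta callbacks are wired into the model; with Thinking Sphinx 3 the delta documentation has you add ThinkingSphinx::Callbacks.append(self, :behaviours => [:deltas]) inside the Entry class. Without that callback, new entries stay flagged with delta = 1 but only become searchable after a successful full reindex.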

Thinking Sphinx: Another index error

While indexing I get this error:
indexing index 'qtl_table_core'...
ERROR: index 'qtl_table_core': sql_range_query: 'soybase.qtl_table.QTLName' isn't in GROUP BY (DSN=mysql://_www:***@xxxxxxx/soybase).
My model:
class QtlTable < ActiveRecord::Base
  ....
  define_index do
    indexes :QTLID, :sortable => true
    indexes :QTLName, :sortable => true
  end
end
development.sphinx.conf
indexer
{
}
searchd
{
listen = 127.0.0.1:1234
log = /usr/home/benjamin/qtl/log/searchd.log
query_log = /usr/home/benjamin/qtl/log/searchd.query.log
pid_file = /usr/home/benjamin/qtl/log/searchd.development.pid
}
source qtl_table_core_0
{
type = mysql
sql_host = xxxxxxxxxxxxxx
sql_user = _www
sql_pass =
sql_db = soybase
sql_query_pre = SET NAMES utf8
sql_query_pre = SET TIME_ZONE = '+0:00'
sql_query = SELECT SQL_NO_CACHE `qtl_table`.`QTLID` * CAST(1 AS SIGNED) + 0 AS `QTLID` , `qtl_table`.`QTLID` AS `QTLID`, `qtl_table`.`QTLName` AS `QTLName`, `qtl_table`.`QTLID` AS `sphinx_internal_id`, 0 AS `sphinx_deleted`, 1786069111 AS `class_crc`, IFNULL(`qtl_table`.`QTLID`, '') AS `QTLID_sort`, IFNULL(`qtl_table`.`QTLName`, '') AS `QTLName_sort` FROM `qtl_table` WHERE (`qtl_table`.`QTLID` >= $start AND `qtl_table`.`QTLID` <= $end) GROUP BY `qtl_table`.`QTLID` ORDER BY NULL
sql_query_range = SELECT IFNULL(MIN(`QTLID`), 1), IFNULL(MAX(`QTLID`), 1) FROM `qtl_table`
sql_attr_uint = sphinx_internal_id
sql_attr_uint = sphinx_deleted
sql_attr_uint = class_crc
sql_attr_str2ordinal = QTLID_sort
sql_attr_str2ordinal = QTLName_sort
sql_query_info = SELECT * FROM `qtl_table` WHERE `QTLID` = (($id - 0) / 1)
}
index qtl_table_core
{
source = qtl_table_core_0
path = /usr/home/benjamin/qtl/db/sphinx/development/qtl_table_core
charset_type = utf-8
min_infix_len = 1
enable_star = 1
}
index qtl_table
{
type = distributed
local = qtl_table_core
}
Try adding the following inside your define_index block:
group_by "`qtl_table`.`QTLName`"

Sphinx Mysql query problem

source logs
{
type = mysql
sql_host = localhost
sql_user = root
sql_pass =
sql_db = bot
sql_port = 3306
sql_query_pre = SET NAMES utf8
sql_query = SELECT * FROM logs
sql_attr_uint = host
sql_query_info = SELECT * FROM logs WHERE id=$id
}
index logs
{
source = logs
path = D:\Webserver/Sphinx/index/logs
morphology = stem_ru, stem_en
min_word_len = 1
charset_type = utf-8
}
searchd
{
listen = 9312
log = D:\Webserver/Sphinx/log/searchd.log
query_log = D:\Webserver/Sphinx/log/query.log
pid_file = D:\Webserver/Sphinx/log/searchd.pid
}
My database:
ID | HOST | POST | URL
1 | yahoo.com | *js3s7Hs56 | http://yahoo.com
2 | google.com | 7sf6jsg73 | http://google.com/?asfaa=23
PHP Sphinx code (search):
<?php
include('sphinxapi.php');
$cl = new SphinxClient();
$cl->SetServer( "localhost", 9312 );
$cl->SetMatchMode( SPH_MATCH_ANY );
$result = $cl->Query("google");
if ( $result === false )
{
    echo "Query failed: " . $cl->GetLastError() . ".\n";
}
else
{
    print_r($result);
}
This code returns:
2
How do I now use Sphinx to retrieve all the data for ID 2?
Sorry for my bad English.
You can now take the matching IDs returned in $result (they are the keys of $result['matches']) and query your database with them.
Something like:
<?php
foreach ($result['IDs'] as $ID) {
$r = mysqli_query('SELECT * FROM `table` WHERE `ID` = ' . $ID);
# Handle $r
}
# Or, more efficiently (depending on how many results you have):
$IDs = implode(',',array_map('intval',$result['IDs']));
$r = mysqli_query('SELECT * FROM `table` WHERE `ID` IN (' . $IDs . ')');
# Handle $r
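As a side note on the design: Sphinx stores only document IDs and declared attributes, not the original text columns, so a second query against MySQL is the standard way to fetch the full rows. The IN (...) variant keeps it to a single round trip, and running every ID through intval() guards against SQL injection since the values are interpolated directly into the query string.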