-- Rank teams in the `poradi` table by points (`body`), highest first; teams
-- with equal points share the same rank (standard competition ranking).
DELIMITER $$
CREATE PROCEDURE SP_poradi()
BEGIN
  DECLARE s_poradi INT;
  DECLARE s_body INT;
  DECLARE temp_body INT DEFAULT 0;
  DECLARE s_tym VARCHAR(50);
  DECLARE my_rank INT DEFAULT 0;
  DECLARE done TINYINT(1) DEFAULT 0;
  DECLARE i INT;
  -- Cursor over all teams, best score first (named rank_cur because RANK is a
  -- reserved word in MySQL 8.0).
  DECLARE rank_cur CURSOR FOR
    SELECT tym, body, poradi FROM poradi
    ORDER BY body DESC;
  DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = 1;
  SET i = 0;
  OPEN rank_cur;
  rank_loop: LOOP
    FETCH rank_cur INTO s_tym, s_body, s_poradi;
    IF done THEN LEAVE rank_loop; END IF;
    IF (i = 0 OR temp_body <> s_body) THEN
      -- New score: the rank is the number of teams already placed, plus one.
      SET my_rank = i + 1;
    END IF;
    -- Teams tied on points keep the rank of the first team with that score.
    UPDATE poradi SET poradi = my_rank WHERE tym = s_tym;
    SET i = i + 1;
    SET temp_body = s_body;
  END LOOP rank_loop;
  CLOSE rank_cur;
END$$
DELIMITER ;
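-- A minimal sketch of the data the procedure above assumes: a `poradi` table
-- with the columns it reads (`tym`, `body`, `poradi`). The column types and
-- the sample rows below are assumptions for illustration only.
CREATE TABLE IF NOT EXISTS poradi (
  tym VARCHAR(50) PRIMARY KEY,  -- team
  body INT NOT NULL,            -- points
  poradi INT NULL               -- rank, filled in by SP_poradi()
);

INSERT INTO poradi (tym, body) VALUES ('Team A', 10), ('Team B', 10), ('Team C', 8);
CALL SP_poradi();
-- Team A and Team B share rank 1, Team C gets rank 3.
SELECT tym, body, poradi FROM poradi ORDER BY poradi, tym;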
-- Maintain the per-topic comment counter: every new comment bumps the topic's
-- total count, and a comment without a parent also updates first_commentary_id.
DELIMITER $$
CREATE TRIGGER trRaiseTopicCountAll
AFTER INSERT ON forum_commentary
FOR EACH ROW
BEGIN
  -- Increment the counter column stored on the topic row.
  UPDATE forum_topic
  SET forum_commentary_count_all = forum_commentary_count_all + 1
  WHERE forum_topic_id = NEW.forum_topic_id;
  IF (NEW.commentary_parent_id IS NULL) THEN
    UPDATE forum_topic
    SET first_commentary_id = NEW.forum_commentary_id
    WHERE forum_topic_id = NEW.forum_topic_id;
  END IF;
END$$
DELIMITER ;
-- Keep the visible-comment counter in sync when a comment's status changes;
-- status id 2 is taken to mean "visible".
DELIMITER $$
CREATE TRIGGER trRaiseTopicCountVisible
AFTER UPDATE ON forum_commentary
FOR EACH ROW
BEGIN
  IF (OLD.forum_commentary_status_id <> NEW.forum_commentary_status_id) THEN
    IF (NEW.forum_commentary_status_id = 2) THEN
      -- Comment became visible: increment the counter column on the topic row.
      UPDATE forum_topic
      SET forum_commentary_count_visible = forum_commentary_count_visible + 1
      WHERE forum_topic_id = NEW.forum_topic_id;
    ELSE
      -- Comment changed away from visible: decrement the counter.
      UPDATE forum_topic
      SET forum_commentary_count_visible = forum_commentary_count_visible - 1
      WHERE forum_topic_id = NEW.forum_topic_id;
    END IF;
  END IF;
END$$
DELIMITER ;
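-- A minimal sketch of the tables the two triggers above assume. The table and
-- column names come from the triggers; the types, keys, and the meaning of
-- status id 2 ("visible") are assumptions.
CREATE TABLE IF NOT EXISTS forum_topic (
  forum_topic_id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
  first_commentary_id INT UNSIGNED NULL,
  forum_commentary_count_all INT NOT NULL DEFAULT 0,
  forum_commentary_count_visible INT NOT NULL DEFAULT 0
);
CREATE TABLE IF NOT EXISTS forum_commentary (
  forum_commentary_id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
  forum_topic_id INT UNSIGNED NOT NULL,
  commentary_parent_id INT UNSIGNED NULL,
  forum_commentary_status_id INT NOT NULL DEFAULT 1
);

-- Inserting a comment bumps the topic's total count; switching its status to 2
-- bumps the visible count.
INSERT INTO forum_topic () VALUES ();
INSERT INTO forum_commentary (forum_topic_id) VALUES (LAST_INSERT_ID());
UPDATE forum_commentary
SET forum_commentary_status_id = 2
WHERE forum_commentary_id = LAST_INSERT_ID();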
-- =============================================================================
-- Find Duplicate Input With MySQL
-- http://kvz.io/blog/2013/03/04/find-duplicate-input-with-mysql/
--
-- At my company we have employees creating customer accounts every day.
-- Sometimes we make mistakes; for instance, we forget to check whether the
-- company was already a customer (maybe they had a product ten years ago).
--
-- Duplicate accounts can cause all sorts of problems, so I wanted a way to
-- detect them with SQL.
--
-- The problem was that the company names may have been entered with different
-- punctuation, whitespace, etc. So I needed similar names to surface from the
-- depths of our database, not just exact matches (that would have been too
-- easy :).
--
-- For the solution I turned to SOUNDEX to fuzzy-match similar-sounding company
-- names, then reviewed the results myself (false positives are possible, but
-- since they are few, double-checking is a simple task) and reported back to
-- our company.
--
-- I thought I'd share this:
--
-- * partly because it could be useful to others (obviously this could be used
--   to detect all kinds of user-generated typos and similar entries);
-- * mostly because I'm curious whether there is a better (more performant)
--   way to write this query.
-- =============================================================================
--
-- Select all the individual company names that have a
-- soundex_code that occurs more than once (I now use a subquery for that)
SELECT
  `id`,
  `customer_name`,
  SOUNDEX(`customer_name`) AS soundex_code
FROM `customers`
WHERE SOUNDEX(`customer_name`) IN (
  -- Subquery: select all soundex_codes that occur more than once
  -- (this does not return the individual company names that share them)
  SELECT SOUNDEX(`customer_name`) AS soundex_code
  FROM `customers`
  WHERE 1 = 1
    AND `is_active` = 1
    -- More specific criteria to define who you want to compare
  GROUP BY soundex_code
  HAVING COUNT(*) > 1
)
ORDER BY
  soundex_code,
  `customer_name`;
--
-- This returns, for example:
--
-- ------|------------------|--------------
--  id   | customer_name    | soundex_code
-- ------|------------------|--------------
--  291  | F.S. Hosting     | F2352
--  1509 | FS hosting       | F2352
--  9331 | R Schmit         | R253
--  9332 | R Schmit         | R253
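--
-- A possible variation (a sketch, not benchmarked against the query above):
-- self-join the table on the SOUNDEX code so every row is paired directly with
-- its look-alikes. Table and column names match the query above.
SELECT
  a.`id`,
  a.`customer_name`,
  b.`id`            AS duplicate_id,
  b.`customer_name` AS duplicate_name,
  SOUNDEX(a.`customer_name`) AS soundex_code
FROM `customers` a
JOIN `customers` b
  ON  SOUNDEX(a.`customer_name`) = SOUNDEX(b.`customer_name`)
  AND a.`id` < b.`id`
WHERE a.`is_active` = 1
  AND b.`is_active` = 1
ORDER BY
  soundex_code,
  a.`customer_name`;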
# To connect to a database:
mysql database_name
# To connect to a database, user will be prompted for a password:
mysql -u user --password database_name
# To connect to a database on another host:
mysql -h database_host database_name
# To connect to a database through a Unix socket:
mysql --socket=path/to/socket.sock
# To execute SQL statements in a script file (batch file):
mysql database_name < script.sql
# ---
# To connect to a database
$ mysql -h localhost -u root -p
# To backup all databases
$ mysqldump --all-databases --routines -u root -p > ~/fulldump.sql
# To restore all databases
$ mysql -u root -p < ~/fulldump.sql
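# To restore a single database from a full dump (a sketch; "mydb" is an example database name)
$ mysql -u root -p --one-database mydb < ~/fulldump.sql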
# Create database with utf-8 collation and utf-8 charset
mysql> create database <database_name> character set utf8 collate utf8_general_ci;
# Create a table with an auto-incrementing primary key (id), w/ utf-8
mysql> create table <table_name> (`id` int unsigned auto_increment, primary key (`id`)) default character set utf8 collate utf8_general_ci;
# List all databases on the SQL server.
mysql> show databases;
# To dump a database to a file (Note that your password will appear in your command history!):
mysqldump -uusername -ppassword the-database > db.sql
# To dump a database to a file:
mysqldump -uusername -p the-database > db.sql
# To dump a database to a gzip-compressed file (Note that your password will appear in your command history!):
mysqldump -uusername -ppassword the-database | gzip -9 > db.sql.gz
# To dump a database to a gzip-compressed file:
mysqldump -uusername -p the-database | gzip -9 > db.sql.gz
# To dump all databases to a file (Note that your password will appear in your command history!):
mysqldump -uusername -ppassword --all-databases > all-databases.sql
# To dump all databases to a file:
mysqldump -uusername -p --all-databases > all-databases.sql
# To export the database structure only:
mysqldump --no-data -uusername -p the-database > dump_file
# To export the database data only:
mysqldump --no-create-info -uusername -p the-database > dump_file
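# To restore a database from a gzip-compressed dump (a sketch; the file name matches the gzip examples above):
gunzip < db.sql.gz | mysql -uusername -p the-database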
-- Generate UPDATE statements that replace 'old_text' with 'new_text' in every
-- character, text, and binary column of the schema 'dbname'
-- (the output is SQL; review it and run it in a second step):
SELECT CONCAT('UPDATE ', table_schema, '.', table_name,
              ' SET ', column_name,
              ' = REPLACE(', column_name, ', ''old_text'', ''new_text'');')
FROM information_schema.columns
WHERE (data_type LIKE '%char%' OR data_type LIKE '%text' OR data_type LIKE '%binary')
  AND table_schema = 'dbname';
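-- One possible way to apply the generated statements (a sketch; the file names
-- below are hypothetical): save the query above as generate_replacements.sql,
-- capture its output, review it, then feed it back to mysql:
--   mysql --skip-column-names -u root -p dbname < generate_replacements.sql > run_replacements.sql
--   mysql -u root -p dbname < run_replacements.sql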