Question : Excluding duplicates from a query in t-sql to use in MS Visual Studio

/*When a customer rings the business they have to get a coverage check of where they live
I want to be able to get a count of how many customer have Passed and Failed the GIS coverage TEST.
However in some cases i will get back a customer account where they will have muniple pass and failure test but i only want to pick the latest one not the duplicates
And in some cases the Dates are the same! Currently we are pulling the data and then doing a TRUE/FALSE on accounts and highlighting the rows that are duplicates
and not including them in the pivot. However i want to create a matrix using M.S. Visual Studio to get the total count of Passed and Failed but i don't know how to
avoide the Duplicates. Could anyone please advise me in what to do?

I have created some example tables with input and i have the code at the bottom which pulls all the rows i want.
I just don't know how to get duplicates in the code or exclude duplicates in the code.
*/

create table GIS_INFO
( account_id nvarchar(10) not null,
gis_date datetime,
GIS_Queue datetime,
coverage_status_id int )
insert into gis_info values (511753,'2006-07-14 00:00:00.000',1);
insert into gis_info values (511753,'2007-08-01 00:00:00.000',1);
insert into gis_info values (543573,'2007-08-02 00:00:00.000',2);
insert into gis_info values (543573,'2007-08-02 00:00:00.000',1);
insert into gis_info values (543573,'2007-08-02 00:00:00.000',2);
insert into gis_info values (543573,'2007-08-02 00:00:00.000',1);
insert into gis_info values (543573,'2007-08-02 00:00:00.000',2);
insert into gis_info values (543573,'2007-08-02 00:00:00.000',2);

Create table COVERAGE_STATUS
(status_id int not null,
status_name nvarchar(10));

insert into COVERAGE_STATUS values (1,'Passed');
insert into COVERAGE_STATUS values (2,'Failed');

Create table ACCOUNT
(account_id nvarchar(10)not null,
date_entered datetime );

Insert into Account values (511753,'2006-11-07 00:00:00.000');
Insert into Account values (543573,'2006-11-22 00:00:00.000');


Create table ACCOUNT_STATUS_HISTORY
(account_status_history_id int not null,
account_id nvarchar(10) not null,
account_status_id int,
account_status_date datetime);

Insert into ACCOUNT_STATUS_HISTORY values(196529,543573,28,'2006-11-22 10:20:00.000');
Insert into ACCOUNT_STATUS_HISTORY values(196532,543573,27,'2006-11-22 10:21:03.000');
Insert into ACCOUNT_STATUS_HISTORY values(198074,543573,25,'2006-11-23 15:22:26.000');
Insert into ACCOUNT_STATUS_HISTORY values(203308,543573,26,'2006-11-29 14:37:38.000');
Insert into ACCOUNT_STATUS_HISTORY values(210299,543573,1,'2006-12-07 10:39:19.000');
Insert into ACCOUNT_STATUS_HISTORY values(388622,543573,3,'2007-07-24 10:03:46.000');
Insert into ACCOUNT_STATUS_HISTORY values(394261,543573,1,'2007-08-01 17:45:50.000');
Insert into ACCOUNT_STATUS_HISTORY values(394262,543573,27,'2007-08-01 17:45:50.000');
Insert into ACCOUNT_STATUS_HISTORY values(394263,543573,1,'2007-08-01 17:46:00.000');
Insert into ACCOUNT_STATUS_HISTORY values(394266,543573,3,'2007-08-01 17:48:53.000');
Insert into ACCOUNT_STATUS_HISTORY values(394267,543573,1,      '2007-08-01 17:49:17.000');
Insert into ACCOUNT_STATUS_HISTORY values(394268,543573,27,      '2007-08-01 17:49:17.000');
Insert into ACCOUNT_STATUS_HISTORY values(394269,543573,1,      '2007-08-01 17:50:19.000');
Insert into ACCOUNT_STATUS_HISTORY values(394448,543573,7,      '2007-08-02 09:16:16.000');
Insert into ACCOUNT_STATUS_HISTORY values(394449,543573,25,      '2007-08-02 09:16:46.000');
Insert into ACCOUNT_STATUS_HISTORY values(398181,543573,26,      '2007-08-09 12:40:36.000');
Insert into ACCOUNT_STATUS_HISTORY values(398193,543573,1,      '2007-08-09 12:52:24.000');

Insert into ACCOUNT_STATUS_HISTORY values(394189,511753,3,'2007-08-01 16:53:48.000');
Insert into ACCOUNT_STATUS_HISTORY values(394191,511753,1,'2007-08-01 16:53:59.000');
Insert into ACCOUNT_STATUS_HISTORY values(394192,511753,27,'2007-08-01 16:53:59.000');
Insert into ACCOUNT_STATUS_HISTORY values(394193,511753,1,'2007-08-01 16:54:05.000');
Insert into ACCOUNT_STATUS_HISTORY values(394205,511753,25,'2007-08-01 17:00:42.000');
Insert into ACCOUNT_STATUS_HISTORY values(394207,511753,1,'2007-08-01 17:01:10.000');
Insert into ACCOUNT_STATUS_HISTORY values(397237,511753,3,'2007-08-08 08:44:19.000');
Insert into ACCOUNT_STATUS_HISTORY values(397969,511753,7,'2007-08-09 09:45:04.000');
Insert into ACCOUNT_STATUS_HISTORY values(71418,511753,      28,      '2006-07-11 10:12:08.000');
Insert into ACCOUNT_STATUS_HISTORY values(72623,511753,      27,      '2006-07-12 11:53:32.000');
Insert into ACCOUNT_STATUS_HISTORY values(74725,511753,      25,      '2006-07-14 15:18:54.000');
Insert into ACCOUNT_STATUS_HISTORY values(74807,511753,      26,      '2006-07-14 15:43:12.000');
Insert into ACCOUNT_STATUS_HISTORY values(75903,511753,      1,      '2006-07-17 17:41:36.000');


--BOSS GIS

SELECT  
  Distinct a.account_id,
  cov.status_name AS Coverage_check


FROM        
GIS_INFO AS g
LEFT OUTER JOIN COVERAGE_STATUS AS cov ON cov.status_id = g.coverage_status_id

--To get accounts in GIS STATUS
LEFT OUTER JOIN ACCOUNT AS a ON a.account_id = g.account_id
INNER JOIN      (SELECT account_status_history_id, account_id, account_status_id, account_status_date
                            FROM  ACCOUNT_STATUS_HISTORY
                                               --27 is the GIS STATUS
                            WHERE      (account_status_id = 27)) AS acc_his ON acc_his.account_id = a.account_id


AND (a.account_id IS NOT NULL)
Order by a.account_id


/*The results i get are:
 511753      Passed
 543573      Failed
 543573      Passed

However i want to be able to see that 543573 has a duplicate and exclude the first one and only return these rows:
511753 Passed
543573 Passed

Just to clarify Not all duplicates will be Passed some will be Failed depending on what way it is entered
All Advise greatly appriciated*/

Kind Regards,
Putoch

Answer : Excluding duplicates from a query in t-sql to use in MS Visual Studio


Select x.max(Coverage_check), x.account_id from (your select here) x
group by x.account_id
Random Solutions  
 
programming4us programming4us