Search Results

Search found 15400 results on 616 pages for 'log4net configuration'.

  • How to create a DataAccessLayer?

    - by NIGHIL DAS
    Hi, I am creating a database application in .NET. I am using a DataAccessLayer class for communicating between .NET objects and the database, but I am not sure whether this class is correct. Can anyone cross-check it and rectify any mistakes? namespace IDataaccess { #region Collection Class public class SPParamCollection : List<SPParams> { } public class SPParamReturnCollection : List<SPParams> { } #endregion #region struct public struct SPParams { public string Name { get; set; } public object Value { get; set; } public ParameterDirection ParamDirection { get; set; } public SqlDbType Type { get; set; } public int Size { get; set; } public string TypeName { get; set; } // public string datatype; } #endregion /// <summary> /// Interface DataAccess Layer implementation New version /// </summary> public interface IDataAccess { DataTable getDataUsingSP(string spName); DataTable getDataUsingSP(string spName, SPParamCollection spParamCollection); DataSet getDataSetUsingSP(string spName); DataSet getDataSetUsingSP(string spName, SPParamCollection spParamCollection); SqlDataReader getDataReaderUsingSP(string spName); SqlDataReader getDataReaderUsingSP(string spName, SPParamCollection spParamCollection); int executeSP(string spName); int executeSP(string spName, SPParamCollection spParamCollection, bool addExtraParmas); int executeSP(string spName, SPParamCollection spParamCollection); DataTable getDataUsingSqlQuery(string strSqlQuery); int executeSqlQuery(string strSqlQuery); SPParamReturnCollection executeSPReturnParam(string spName, SPParamReturnCollection spParamReturnCollection); SPParamReturnCollection executeSPReturnParam(string spName, SPParamCollection spParamCollection, SPParamReturnCollection spParamReturnCollection); SPParamReturnCollection executeSPReturnParam(string spName, SPParamCollection spParamCollection, SPParamReturnCollection spParamReturnCollection, bool addExtraParmas); int executeSPReturnParam(string spName, SPParamCollection spParamCollection, ref SPParamReturnCollection spParamReturnCollection); object getScalarUsingSP(string spName); object getScalarUsingSP(string spName, SPParamCollection spParamCollection); } } using IDataaccess; namespace Dataaccess { /// <summary> /// Class DataAccess Layer implementation New version /// </summary> public class DataAccess : IDataaccess.IDataAccess { #region Public variables static string Strcon; DataSet dts = new DataSet(); public DataAccess() { Strcon = sReadConnectionString(); } private string sReadConnectionString() { try { //dts.ReadXml("C:\\cnn.config"); //Strcon = dts.Tables[0].Rows[0][0].ToString(); //System.Configuration.Configuration config = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.None); //Strcon = config.ConnectionStrings.ConnectionStrings["connectionString"].ConnectionString; // Add an Application Setting. 
//Strcon = "Data Source=192.168.50.103;Initial Catalog=erpDB;User ID=ipixerp1;Password=NogoXVc3"; Strcon = System.Configuration.ConfigurationManager.AppSettings["connection"]; //Strcon = System.Configuration.ConfigurationSettings.AppSettings[0].ToString(); } catch (Exception) { } return Strcon; } public SqlConnection connection; public SqlCommand cmd; public SqlDataAdapter adpt; public DataTable dt; public int intresult; public SqlDataReader sqdr; #endregion #region Public Methods public DataTable getDataUsingSP(string spName) { return getDataUsingSP(spName, null); } public DataTable getDataUsingSP(string spName, SPParamCollection spParamCollection) { try { using (connection = new SqlConnection(Strcon)) { connection.Open(); using (cmd = new SqlCommand(spName, connection)) { int count, param = 0; if (spParamCollection == null) { param = -1; } else { param = spParamCollection.Count; } for (count = 0; count < param; count++) { cmd.Parameters.AddWithValue(spParamCollection[count].Name, spParamCollection[count].Value); } cmd.CommandType = CommandType.StoredProcedure; cmd.CommandTimeout = 60; adpt = new SqlDataAdapter(cmd); dt = new DataTable(); adpt.Fill(dt); return (dt); } } } finally { connection.Close(); } } public DataSet getDataSetUsingSP(string spName) { return getDataSetUsingSP(spName, null); } public DataSet getDataSetUsingSP(string spName, SPParamCollection spParamCollection) { try { using (connection = new SqlConnection(Strcon)) { connection.Open(); using (cmd = new SqlCommand(spName, connection)) { int count, param = 0; if (spParamCollection == null) { param = -1; } else { param = spParamCollection.Count; } for (count = 0; count < param; count++) { cmd.Parameters.AddWithValue(spParamCollection[count].Name, spParamCollection[count].Value); } cmd.CommandType = CommandType.StoredProcedure; cmd.CommandTimeout = 60; adpt = new SqlDataAdapter(cmd); DataSet ds = new DataSet(); adpt.Fill(ds); return ds; } } } finally { connection.Close(); } } public SqlDataReader getDataReaderUsingSP(string spName) { return getDataReaderUsingSP(spName, null); } public SqlDataReader getDataReaderUsingSP(string spName, SPParamCollection spParamCollection) { try { using (connection = new SqlConnection(Strcon)) { connection.Open(); using (cmd = new SqlCommand(spName, connection)) { int count, param = 0; if (spParamCollection == null) { param = -1; } else { param = spParamCollection.Count; } for (count = 0; count < param; count++) { cmd.Parameters.AddWithValue(spParamCollection[count].Name, spParamCollection[count].Value); } cmd.CommandType = CommandType.StoredProcedure; cmd.CommandTimeout = 60; sqdr = cmd.ExecuteReader(); return (sqdr); } } } finally { connection.Close(); } } public int executeSP(string spName) { return executeSP(spName, null); } public int executeSP(string spName, SPParamCollection spParamCollection, bool addExtraParmas) { try { using (connection = new SqlConnection(Strcon)) { connection.Open(); using (cmd = new SqlCommand(spName, connection)) { int count, param = 0; if (spParamCollection == null) { param = -1; } else { param = spParamCollection.Count; } for (count = 0; count < param; count++) { SqlParameter par = new SqlParameter(spParamCollection[count].Name, spParamCollection[count].Value); if (addExtraParmas) { par.TypeName = spParamCollection[count].TypeName; par.SqlDbType = spParamCollection[count].Type; } cmd.Parameters.Add(par); } cmd.CommandType = CommandType.StoredProcedure; cmd.CommandTimeout = 60; return (cmd.ExecuteNonQuery()); } } } finally { connection.Close(); } } public int 
executeSP(string spName, SPParamCollection spParamCollection) { return executeSP(spName, spParamCollection, false); } public DataTable getDataUsingSqlQuery(string strSqlQuery) { try { using (connection = new SqlConnection(Strcon)) connection.Open(); { using (cmd = new SqlCommand(strSqlQuery, connection)) { cmd.CommandType = CommandType.Text; cmd.CommandTimeout = 60; adpt = new SqlDataAdapter(cmd); dt = new DataTable(); adpt.Fill(dt); return (dt); } } } finally { connection.Close(); } } public int executeSqlQuery(string strSqlQuery) { try { using (connection = new SqlConnection(Strcon)) { connection.Open(); using (cmd = new SqlCommand(strSqlQuery, connection)) { cmd.CommandType = CommandType.Text; cmd.CommandTimeout = 60; intresult = cmd.ExecuteNonQuery(); return (intresult); } } } finally { connection.Close(); } } public SPParamReturnCollection executeSPReturnParam(string spName, SPParamReturnCollection spParamReturnCollection) { return executeSPReturnParam(spName, null, spParamReturnCollection); } public int executeSPReturnParam() { return 0; } public int executeSPReturnParam(string spName, SPParamCollection spParamCollection, ref SPParamReturnCollection spParamReturnCollection) { try { SPParamReturnCollection spParamReturned = new SPParamReturnCollection(); using (connection = new SqlConnection(Strcon)) { connection.Open(); using (cmd = new SqlCommand(spName, connection)) { int count, param = 0; if (spParamCollection == null) { param = -1; } else { param = spParamCollection.Count; } for (count = 0; count < param; count++) { cmd.Parameters.AddWithValue(spParamCollection[count].Name, spParamCollection[count].Value); } cmd.CommandType = CommandType.StoredProcedure; foreach (SPParams paramReturn in spParamReturnCollection) { SqlParameter _parmReturn = new SqlParameter(paramReturn.Name, paramReturn.Size); _parmReturn.Direction = paramReturn.ParamDirection; if (paramReturn.Size > 0) _parmReturn.Size = paramReturn.Size; else _parmReturn.Size = 32; _parmReturn.SqlDbType = paramReturn.Type; cmd.Parameters.Add(_parmReturn); } cmd.CommandTimeout = 60; intresult = cmd.ExecuteNonQuery(); connection.Close(); //for (int i = 0; i < spParamReturnCollection.Count; i++) //{ // spParamReturned.Add(new SPParams // { // Name = spParamReturnCollection[i].Name, // Value = cmd.Parameters[spParamReturnCollection[i].Name].Value // }); //} } } return intresult; } finally { connection.Close(); } } public SPParamReturnCollection executeSPReturnParam(string spName, SPParamCollection spParamCollection, SPParamReturnCollection spParamReturnCollection) { return executeSPReturnParam(spName, spParamCollection, spParamReturnCollection, false); } public SPParamReturnCollection executeSPReturnParam(string spName, SPParamCollection spParamCollection, SPParamReturnCollection spParamReturnCollection, bool addExtraParmas) { try { SPParamReturnCollection spParamReturned = new SPParamReturnCollection(); using (connection = new SqlConnection(Strcon)) { connection.Open(); using (cmd = new SqlCommand(spName, connection)) { int count, param = 0; if (spParamCollection == null) { param = -1; } else { param = spParamCollection.Count; } for (count = 0; count < param; count++) { //cmd.Parameters.AddWithValue(spParamCollection[count].Name, spParamCollection[count].Value); SqlParameter par = new SqlParameter(spParamCollection[count].Name, spParamCollection[count].Value); if (addExtraParmas) { par.TypeName = spParamCollection[count].TypeName; par.SqlDbType = spParamCollection[count].Type; } cmd.Parameters.Add(par); } cmd.CommandType = 
CommandType.StoredProcedure; foreach (SPParams paramReturn in spParamReturnCollection) { SqlParameter _parmReturn = new SqlParameter(paramReturn.Name, paramReturn.Value); _parmReturn.Direction = paramReturn.ParamDirection; if (paramReturn.Size > 0) _parmReturn.Size = paramReturn.Size; else _parmReturn.Size = 32; _parmReturn.SqlDbType = paramReturn.Type; cmd.Parameters.Add(_parmReturn); } cmd.CommandTimeout = 60; cmd.ExecuteNonQuery(); connection.Close(); for (int i = 0; i < spParamReturnCollection.Count; i++) { spParamReturned.Add(new SPParams { Name = spParamReturnCollection[i].Name, Value = cmd.Parameters[spParamReturnCollection[i].Name].Value }); } } } return spParamReturned; } catch (Exception ex) { return null; } finally { connection.Close(); } } public object getScalarUsingSP(string spName) { return getScalarUsingSP(spName, null); } public object getScalarUsingSP(string spName, SPParamCollection spParamCollection) { try { using (connection = new SqlConnection(Strcon)) { connection.Open(); using (cmd = new SqlCommand(spName, connection)) { int count, param = 0; if (spParamCollection == null) { param = -1; } else { param = spParamCollection.Count; } for (count = 0; count < param; count++) { cmd.Parameters.AddWithValue(spParamCollection[count].Name, spParamCollection[count].Value); cmd.CommandTimeout = 60; } cmd.CommandType = CommandType.StoredProcedure; return cmd.ExecuteScalar(); } } } finally { connection.Close(); cmd.Dispose(); } } #endregion } }
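
    Two hazards stand out on a first read, sketched below rather than asserted as the only fix: the connection, command and adapter are shared public fields that every method reassigns, so concurrent callers can trample each other's state, and in getDataUsingSqlQuery the using statement wraps only the connection.Open() call rather than the block that follows it. A hedged rework of that one method using locals:

        // A sketch, not a drop-in replacement: locals instead of shared
        // fields, so concurrent callers get independent connections.
        using System.Data;
        using System.Data.SqlClient;

        public class DataAccessSketch
        {
            private readonly string strCon;

            public DataAccessSketch(string connectionString)
            {
                strCon = connectionString;
            }

            public DataTable getDataUsingSqlQuery(string strSqlQuery)
            {
                using (var connection = new SqlConnection(strCon))
                using (var cmd = new SqlCommand(strSqlQuery, connection))
                using (var adapter = new SqlDataAdapter(cmd))
                {
                    cmd.CommandType = CommandType.Text;
                    cmd.CommandTimeout = 60;
                    var dt = new DataTable();
                    adapter.Fill(dt);   // Fill opens/closes the connection
                    return dt;
                }
            }
        }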

  • Can I use pdb files to step through a 3rd party assembly?

    - by Pure.Krome
    Hi folks, my friend has made a really helpful class library which I use all the time. I usually use Reflector to see what his code does. What I really want to do is step through his code while I'm debugging, so he gave me his .pdb file: Foo.dll (release configuration, compiled) Foo.pdb Now, I'm not sure how I can get it to automatically break into his code when it throws an exception (his code, at various points, throws exceptions, like "A first chance exception of type 'System.Web.HttpException' occurred in Foo.dll"). Can I do this? Do I need to set something up with the Symbol Server settings in Visual Studio? Do I need to get the dll compiled in the Debug configuration and be passed the .dll and .pdb files? Or (and I'm really afraid of this one) do I need to have the .dll, the .pdb AND his source code? I also had a look at this previous SO question, but it sorta didn't help (though it's proof that I've tried to search before asking a question). Can someone help me please?

  • Forms Authentication & Virtual Directory

    - by benclaytonfranklin
    Hi, we're having trouble getting Forms Authentication to work with a virtual directory in IIS. We have a main site, and then a microsite set up within a virtual directory. This microsite has its own admin system within an "Admin" folder, which has authentication on it, but currently the authentication is not kicking in and the admin section is browsable by anyone. The web.config within the admin folder has the following: <?xml version="1.0"?> <configuration> <appSettings/> <connectionStrings/> <system.web> <authorization> <deny users="?"/> </authorization> <customErrors mode="RemoteOnly" defaultRedirect="~/Admin/Error.aspx"/> </system.web> </configuration> Could anyone give me any clues as to why this might not be working? Cheers!

  • Trouble with object injection in Spring.Net

    - by Abdel Olakara
    Hi all, I have an issue with my Spring.Net configuration where it's not injecting an object. I have a CommService into which an object named GeneralEmail is injected. Here is the configuration: <!-- GeneralMail Object --> <object id="GeneralMailObject" type="CommUtil.Email.GeneralEmail, CommUtil"> <constructor-arg name="host" value="xxxxx.com"/> <constructor-arg name="port" value="25"/> <constructor-arg name="user" value="[email protected]"/> <constructor-arg name="password" value="xxxxx"/> <constructor-arg name="template" value="xxxxx"/> </object> <!-- Communication Service --> <object id="CommServiceObject" type="TApp.Code.Services.CommService, TApp"> <property name="emailService" ref="GeneralMailObject" /> </object> The communication service object is in turn injected into many other aspx pages and services. In one scenario, I need to call the communication service from a static WebMethod. I tried doing: CommService cso = new CommService(); But when I try to get the emailService object, it's null! Why didn't Spring inject the GeneralMail object into my cso object? What am I doing wrong, and how do I access the object from the Spring container? Thanks in advance for the suggestions and solutions. Regards, Abdel Olakara
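
    A minimal sketch of the usual pattern, assuming the standard Spring.NET API: objects constructed with new are never seen by the container, so dependencies are only injected into instances the container itself creates. From a static WebMethod, the configured object can be pulled out of the context by its id (CommServiceObject is the id declared in the XML above):

        // Sketch: resolve the configured object instead of new-ing it up.
        using Spring.Context;
        using Spring.Context.Support;

        public static class CommServiceLookup
        {
            public static object GetCommService()
            {
                // ContextRegistry reads the <spring> section of the config file.
                IApplicationContext ctx = ContextRegistry.GetContext();

                // Returns the container-managed instance, with emailService
                // already injected; cast it to CommService at the call site.
                return ctx.GetObject("CommServiceObject");
            }
        }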

  • Moving .NET assemblies away from the application base directory?

    - by RasmusKL
    I have a WinForms application with a bunch of third-party references. This makes the output folder quite messy. I'd like to place the compiled / referenced dlls into a common subdirectory in the output folder (bin / lib - whatever) and have just the executables (plus needed configs etc.) reside in the output folder. After some searching I ran into assembly probing (http://msdn.microsoft.com/en-us/library/4191fzwb.aspx) and verified that if I set this up and manually move the assemblies, my application will still work when they are stored in the designated subdirectory, like so: <configuration> <runtime> <assemblyBinding xmlns="urn:schemas-microsoft-com:asm.v1"> <probing privatePath="bin" /> </assemblyBinding> </runtime> </configuration> However, this doesn't solve the build part: is there any way to specify where referenced assemblies and compiled library assemblies go? The only solutions I can think of off the top of my head are post-build actions, or dropping the idea and using ILMerge or something. There has got to be a better way of defining the structure :-)
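
    The probing element answers the runtime half; for completeness, a hand-rolled resolver is another runtime option. A small sketch, assuming the DLLs end up in a lib subfolder next to the executables (the folder name is illustrative, and the build-time copying still needs a post-build step or similar):

        // Sketch: resolve assemblies from a "lib" subdirectory at runtime,
        // as an alternative to <probing>. The "lib" name is an assumption.
        using System;
        using System.IO;
        using System.Reflection;

        static class LibFolderResolver
        {
            public static void Install()
            {
                AppDomain.CurrentDomain.AssemblyResolve += (sender, args) =>
                {
                    string file = new AssemblyName(args.Name).Name + ".dll";
                    string path = Path.Combine(
                        AppDomain.CurrentDomain.BaseDirectory, "lib", file);
                    return File.Exists(path) ? Assembly.LoadFrom(path) : null;
                };
            }
        }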

  • HDFS: some datanodes of the cluster are suddenly disconnected while reducers are running

    - by user1429825
    I have 8 slave computers and 1 master computer running Hadoop (version 0.21). Some datanodes of the cluster suddenly disconnected while I was running MapReduce code on 10 GB of data. After all the mappers had finished and around 80% of the reducers had been processed, one or more datanodes randomly disconnected from the network, and then the other datanodes started to disappear from the network as well, even though I killed the MapReduce job as soon as I noticed a datanode had disconnected. I've tried changing dfs.datanode.max.xcievers to 4096, turned off the firewalls of all computing nodes, disabled selinux and increased the open file limit to 20000, but none of it worked at all... Does anyone have an idea how to solve this problem? The following is the error log from MapReduce: 12/06/01 12:31:29 INFO mapreduce.Job: Task Id : attempt_201206011227_0001_r_000006_0, Status : FAILED java.io.IOException: Bad connect ack with firstBadLink as ***.***.***.148:20010 at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.createBlockOutputStream(DFSOutputStream.java:889) at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:820) at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:427) and the following are logs from a datanode: 2012-06-01 13:01:01,118 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Receiving block blk_-5549263231281364844_3453 src: /*.*.*.147:56205 dest: /*.*.*.142:20010 2012-06-01 13:01:01,136 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeRegistration(*.*.*.142:20010, storageID=DS-1534489105-*.*.*.142-20010-1337757934836, infoPort=20075, ipcPort=20020) Starting thread to transfer block blk_-3849519151985279385_5906 to *.*.*.147:20010 2012-06-01 13:01:19,135 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeRegistration(*.*.*.142:20010, storageID=DS-1534489105-*.*.*.142-20010-1337757934836, infoPort=20075, ipcPort=20020):Failed to transfer blk_-5797481564121417802_3453 to *.*.*.146:20010 got java.net.ConnectException: Connection timed out at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method) at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:701) at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206) at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:373) at org.apache.hadoop.hdfs.server.datanode.DataNode$DataTransfer.run(DataNode.java:1257) at java.lang.Thread.run(Thread.java:722) 2012-06-01 13:06:20,342 INFO org.apache.hadoop.hdfs.server.datanode.DataBlockScanner: Verification succeeded for blk_6674438989226364081_3453 2012-06-01 13:09:01,781 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeRegistration(*.*.*.142:20010, storageID=DS-1534489105-*.*.*.142-20010-1337757934836, infoPort=20075, ipcPort=20020):Failed to transfer blk_-3849519151985279385_5906 to *.*.*.147:20010 got java.net.SocketTimeoutException: 480000 millis timeout while waiting for channel to be ready for write. 
ch : java.nio.channels.SocketChannel[connected local=/*.*.*.142:60057 remote=/*.*.*.147:20010] at org.apache.hadoop.net.SocketIOWithTimeout.waitForIO(SocketIOWithTimeout.java:246) at org.apache.hadoop.net.SocketOutputStream.waitForWritable(SocketOutputStream.java:164) at org.apache.hadoop.net.SocketOutputStream.transferToFully(SocketOutputStream.java:203) at org.apache.hadoop.hdfs.server.datanode.BlockSender.sendChunks(BlockSender.java:388) at org.apache.hadoop.hdfs.server.datanode.BlockSender.sendBlock(BlockSender.java:476) at org.apache.hadoop.hdfs.server.datanode.DataNode$DataTransfer.run(DataNode.java:1284) at java.lang.Thread.run(Thread.java:722) hdfs-site.xml <configuration> <property> <name>dfs.name.dir</name> <value>/home/hadoop/data/name</value> </property> <property> <name>dfs.data.dir</name> <value>/home/hadoop/data/hdfs1,/home/hadoop/data/hdfs2,/home/hadoop/data/hdfs3,/home/hadoop/data/hdfs4,/home/hadoop/data/hdfs5</value> </property> <property> <name>dfs.replication</name> <value>3</value> </property> <property> <name>dfs.datanode.max.xcievers</name> <value>4096</value> </property> <property> <name>dfs.http.address</name> <value>0.0.0.0:20070</value> <description>50070 The address and the base port where the dfs namenode web ui will listen on. If the port is 0 then the server will start on a free port. </description> </property> <property> <name>dfs.datanode.http.address</name> <value>0.0.0.0:20075</value> <description>50075 The datanode http server address and port. If the port is 0 then the server will start on a free port. </description> </property> <property> <name>dfs.secondary.http.address</name> <value>0.0.0.0:20090</value> <description>50090 The secondary namenode http server address and port. If the port is 0 then the server will start on a free port. </description> </property> <property> <name>dfs.datanode.address</name> <value>0.0.0.0:20010</value> <description>50010 The address where the datanode server will listen to. If the port is 0 then the server will start on a free port. </description> </property> <property> <name>dfs.datanode.ipc.address</name> <value>0.0.0.0:20020</value> <description>50020 The datanode ipc server address and port. If the port is 0 then the server will start on a free port. 
</description> </property> <property> <name>dfs.datanode.https.address</name> <value>0.0.0.0:20475</value> </property> <property> <name>dfs.https.address</name> <value>0.0.0.0:20470</value> </property> </configuration> mapred-site.xml <configuration> <property> <name>mapred.job.tracker</name> <value>masternode:29001</value> </property> <property> <name>mapred.system.dir</name> <value>/home/hadoop/data/mapreduce/system</value> </property> <property> <name>mapred.local.dir</name> <value>/home/hadoop/data/mapreduce/local</value> </property> <property> <name>mapred.map.tasks</name> <value>32</value> <description> default number of map tasks per job.</description> </property> <property> <name>mapred.tasktracker.map.tasks.maximum</name> <value>4</value> </property> <property> <name>mapred.reduce.tasks</name> <value>8</value> <description> default number of reduce tasks per job.</description> </property> <property> <name>mapred.map.child.java.opts</name> <value>-Xmx2048M</value> </property> <property> <name>io.sort.mb</name> <value>500</value> </property> <property> <name>mapred.task.timeout</name> <value>1800000</value> <!-- 30 minutes --> </property> <property> <name>mapred.job.tracker.http.address</name> <value>0.0.0.0:20030</value> <description> 50030 The job tracker http server address and port the server will listen on. If the port is 0 then the server will start on a free port. </description> </property> <property> <name>mapred.task.tracker.http.address</name> <value>0.0.0.0:20060</value> <description> 50060 </description> </property> </configuration>

  • Uploading Binary iPhone App "The signature was invalid" again again and again...

    - by user338386
    Hello! I'm going crazy! I'm trying to upload the binary of my first application, but I always get the same error: "The binary you uploaded was invalid. The signature was invalid, or it was not signed with an Apple submission certificate." I did everything, EVERYTHING!! I created the certificate request, used it for both the developer and distribution certificates, and created the provisioning profile (12 times!!!), always cleaning my keychain and Xcode by deleting the old certificates and profiles. I rebooted the machine, restarted Xcode; the log is correct, but... I can't upload my app! I checked whether my iPhone was connected (I tried with the iPhone disconnected too). I checked the certificate in both my project settings' "Distribution" configuration (a duplicate of the "Release" configuration) and in my target settings. I used Reveal in Finder, compressed the app and sent the zip... I tried with Application Loader and iTunes Connect online... but nothing! NOTHING!! I've spent 8 hours, and I still can't get my app uploaded! I'm really going crazy! Can anyone help me, please? Thanks!

  • Exporting Maven properties from Ant code

    - by Gili
    I've embedded the following code within my POM: <plugin name="test"> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-antrun-plugin</artifactId> <executions> <execution> <phase>validate</phase> <configuration> <tasks> <pathconvert targetos="unix" property="project.build.directory.portable"> <path location="${project.build.directory}"/> </pathconvert> </tasks> </configuration> <goals> <goal>run</goal> </goals> </execution> </executions> </plugin> I then reference ${project.build.directory.portable} from the run project action but it comes back as null. Executing <echo> within the Ant block shows the correct value. What am I doing wrong?

  • Publishing a WCF Server and client and their endpoints

    - by Ahmadreza
    Imagine developing a WCF solution with two projects (a WCF service, and a web application as the WCF client). As long as I'm developing these two projects in Visual Studio and referencing the service from the client (web application) as a service reference, there is no problem. Visual Studio automatically assigns a port for the WCF server and generates all the needed configuration, including the server and client bindings, to something like this on the server: <service behaviorConfiguration="DefaultServiceBehavior" name="MYWCFProject.MyService"> <endpoint address="" binding="wsHttpBinding" contract="MYWCFProject.IMyService"> <identity> <dns value="localhost" /> </identity> </endpoint> <host> <baseAddresses> <add baseAddress="http://localhost:8731/MyService.svc" /> </baseAddresses> </host> </service> and on the client: <client> <endpoint address="http://localhost:8731/MyService.svc" binding="wsHttpBinding" bindingConfiguration="WSHttpBinding_IMyService" contract="MyWCFProject.IMyService" name="WSHttpBinding_IMyService"> <identity> <dns value="localhost" /> </identity> </endpoint> </client> The problem is that I frequently want to publish these two projects to two different servers as my production servers, where the service URL will be "http://mywcfdomain/MyService.svc". I don't want to change the config file every time I publish my server project. The question is: is there any feature in Visual Studio 2008 to automatically change the URLs, or do I have to define two different endpoints and select between them in code (based on a configuration parameter, for example Development/Published)?
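
    One common workaround, sketched here rather than prescribed: keep only the address in appSettings and build the endpoint in code, so the same binary runs in development and production. The MyServiceUrl key below is a hypothetical name:

        // Sketch: construct the client channel from a configurable address.
        using System.Configuration;
        using System.ServiceModel;

        public static class MyServiceChannel
        {
            public static MYWCFProject.IMyService Open()
            {
                // "MyServiceUrl" is an illustrative appSettings key, e.g.
                // http://localhost:8731/MyService.svc in development.
                string url = ConfigurationManager.AppSettings["MyServiceUrl"];

                var factory = new ChannelFactory<MYWCFProject.IMyService>(
                    new WSHttpBinding(), new EndpointAddress(url));
                return factory.CreateChannel();
            }
        }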

  • Is my objective possible using WCF (and is it the right way to do things?)

    - by David
    I'm writing some software that modifies a Windows Server's configuration (things like MS-DNS, IIS, parts of the filesystem). My design has a server process that builds an in-memory object graph of the server configuration state and a client which requests this object graph. The server would then serialize the graph and send it to the client (presumably using WCF); the client then makes changes to this graph and sends it back to the server. The server receives the graph and proceeds to apply the modifications to the server. However, I've learned that object-graph serialisation in WCF isn't as simple as I first thought. My objects have a hierarchy, and many have parametrised constructors and immutable properties/fields. There are also numerous collections, arrays, and dictionaries. My understanding of WCF serialisation is that it requires use of either the XmlSerializer or the DataContractSerializer, but DCS places restrictions on the design of my object graph (immutable data seems right out, and it also requires parameterless constructors). I understand XmlSerializer lets me use my own classes provided they implement ISerializable and have the de-serializer constructor. That is fine by me. I spoke to a friend of mine about this, and he advocates a Data Transfer Object-only route, where I'd have to maintain a separate DataContract object graph for the transport of data and re-implement my server objects on the client. Another friend of mine said that because my service only has two operations ("GetServerConfiguration" and "PutServerConfiguration") it might be worthwhile just skipping WCF entirely and implementing my own server that uses Sockets. So my questions are: Has anyone faced a similar problem before, and if so, are there better approaches? Is it wise to send an entire object graph to the client for processing? Should I instead break it down so that the client requests a part of the object graph as it needs it and sends only bits that have changed (thus reducing concurrency-related risks)? If sending the object graph down is the right way, is WCF the right tool? And if WCF is right, what's the best way to get WCF to serialise my object graph?
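
    On the serializer question, one detail may ease the immutability concern: DataContractSerializer does not call constructors for [DataContract] types at all, and since .NET 3.5 SP1 it can preserve shared references within a graph. A small sketch with illustrative types (not the asker's model):

        // Sketch: a reference-preserving, constructor-free contract.
        // DataContractSerializer materialises [DataContract] types without
        // invoking any constructor, so parametrised ctors are not a blocker.
        using System.Runtime.Serialization;

        [DataContract(IsReference = true)]  // shared nodes round-trip as refs
        public class ZoneEntry   // hypothetical type, not from the question
        {
            [DataMember] private string name;
            [DataMember] private ZoneEntry parent;

            public ZoneEntry(string name, ZoneEntry parent)
            {
                this.name = name;
                this.parent = parent;
            }

            public string Name { get { return name; } }
            public ZoneEntry Parent { get { return parent; } }
        }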

  • SQL Server 2008 - db mail issue

    - by Chris
    Hello. I have two instances of SQL Server 2008. One was upgraded from SQL Server 2000 and one was a clean, new install. SQL Mail operates perfectly on both instances. DB Mail operates perfectly on the newly installed instance. On the upgraded instance, DB Mail does not send any mail. Of course, I am not positive that the fact this instance is upgraded has anything to do with the issue, but it might. The configuration of my DB Mail profile and account looks identical to my functioning instance. In the configuration of the 'Alerts' tab in the SQL Agent properties I have tried selecting both DB Mail and SQL Mail, to no avail. Both instances use the same SMTP server with the same authentication (domain with db engine account). All messages sent via sp_send_dbmail, and those sent via the 'test email' option, are visible in the sysmail_allitems queue and remain there as 'unsent'. The send_status eventually changes to 'failed'. The only messages in the sysmail_event_log are 'mail queue stopped by login domain\myuser', 'mail queue started by login domain/myuser' and 'activation successful.'. Selecting from the externalmailqueue returns the same number of rows as sysmail_allitems. I have tried bouncing the agent, bouncing the entire instance, and moving the other functioning instance to the other node in the cluster. Any thoughts? Thanks.

  • Is JPA persistence.xml classpath located?

    - by Vinnie
    Here's what I'm trying to do. I'm using JPA persistence in a web application, but I have a set of unit tests that I want to run outside of a container. I have my primary persistence.xml in the META-INF folder of my main app, and it works great in the container (Glassfish). I placed a second persistence.xml in the META-INF folder of my test-classes directory. This contains a separate persistence unit that I want to use for tests only. In Eclipse, I placed this folder higher in the classpath than the default folder, and it seems to work. But when I run the Maven build directly from the command line and it attempts to run the unit tests, the persistence.xml override is ignored. I can see the override in the META-INF folder of the Maven-generated test-classes directory, and I expected the Maven tests to use this file, but they don't. My Spring test configuration overrides, achieved in a similar fashion, are working. I'm confused as to whether persistence.xml is located through the classpath. If it were, my override should work like the Spring override, since the Maven Surefire plugin documentation explains that "[The test class directory] will be included at the beginning of the test classpath". Did I wrongly anticipate how the persistence.xml file is located? I could (and have) create a second persistence unit in the production persistence.xml file, but it feels dirty to place test configuration into this production file. Any other ideas on how to achieve my goal are welcome.

  • Starting with NHibernate

    - by George
    I'm having major difficulties getting started with NHibernate. Main problems: Where should my hbm.xml files reside? I created a Mappings folder, but I received the error "Could not find xxx.hbm.xml file." I tried to load the specific class via cfg.AddClass(typeof(xxx)); but it still gives me the same error (the files are marked as embedded resources). Also, I'm having major problems connecting to the database. I stopped trying to use the cfg xml file and tried a more direct approach with a library I have here: Configuration cfg = new Configuration(); cfg.AddClass(typeof(Tag)); ISessionFactory sessions = cfg.BuildSessionFactory(); AgnosticConnectionHandler agch = new AgnosticConnectionHandler("xxx","xxx","geo_biblio","localhost", 5432,DatabaseInstance.PostgreSQL); ISession sessao = sessions.OpenSession(agch.GetConnection); ITransaction tx = sessao.BeginTransaction(); Tag tag1 = new Tag(); tag1.NomeTag = "Teste Tag NHibernate!!!"; sessao.Save(tag1); tx.Commit(); sessao.Close(); Any tips for me? I'm getting the exception in line 2 of this code, and I'm still not sure what to do. Any help is appreciated. Thanks
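
    For the first problem, a minimal sketch of the conventional setup, assuming an NHibernate 2.x-era API: mapping files are looked up as embedded resources named <namespace>.<class>.hbm.xml (folder names become part of the resource name), and the connection string lives in hibernate.cfg.xml rather than a separate connection handler:

        // Sketch: build the factory from hibernate.cfg.xml plus the mappings
        // embedded in the assembly that contains the Tag class.
        using NHibernate;
        using NHibernate.Cfg;

        public static class SessionFactoryBuilder
        {
            public static ISessionFactory Build()
            {
                var cfg = new Configuration();
                cfg.Configure();                        // reads hibernate.cfg.xml
                cfg.AddAssembly(typeof(Tag).Assembly);  // scans for *.hbm.xml
                return cfg.BuildSessionFactory();
            }
        }

    With the connection string in hibernate.cfg.xml, OpenSession() with no arguments opens its own connection, so the separate AgnosticConnectionHandler should not be needed.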

  • Testing ASP.NET webservice using NUnit and transferring session state

    - by herbertyeung
    I have a NUnit test class that starts an ASP.NET web service (using Microsoft.VisualStudio.WebHost.Server) which runs on http://localhost:1070. The problem I am having is that I want to create session state within the NUnit test that is accessible by the ASP.NET web service on localhost:1070. I have done the following, and the session state is created successfully inside the NUnit test, but it is lost when the web service is invoked: //Create a new HttpContext for NUnit Testing based on: //http://blogs.imeta.co.uk/jallderidge/archive/2008/10/19/456.aspx HttpContext.Current = new HttpContext( new HttpRequest("", "http://localhost:1070/", ""), new HttpResponse( new System.IO.StringWriter())); //Create a new HttpContext.Current for NUnit Testing System.Web.SessionState.SessionStateUtility.AddHttpSessionStateToContext( HttpContext.Current, new HttpSessionStateContainer("", new SessionStateItemCollection(), new HttpStaticObjectsCollection(), 20000, true, HttpCookieMode.UseCookies, SessionStateMode.Off, false)); HttpContext.Current.Session["UserName"] = "testUserName"; testwebService.testMethod(); I want to be able to get the session state created in the NUnit test for Session["UserName"] in the ASP.NET web service: [WebMethod(EnableSession=true)] public int testMethod() { string user; if(Session["UserName"] != null) { user = (string)Session["UserName"]; //Do some processing of the user return 1; } else return 0; } The web.config file has the following configuration for the session state, and I would like to keep using InProc rather than StateServer or SQLServer: <sessionState mode="InProc" stateConnectionString="tcpip=127.0.0.1:42424" cookieless="false" timeout="20"/>
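
    One thing worth noting about this setup: the HttpContext built inside the NUnit process lives in that process, while the web service runs in its own host with its own InProc session store, so the state is never shared between the two. A common workaround is to route session access through an injectable provider; a sketch in which every name is hypothetical:

        // Sketch: route session access through an interface so the test can
        // substitute an in-memory fake; all names here are hypothetical.
        public interface ISessionProvider
        {
            object this[string key] { get; set; }
        }

        // Production implementation backed by the real ASP.NET session.
        public class AspNetSessionProvider : ISessionProvider
        {
            public object this[string key]
            {
                get { return System.Web.HttpContext.Current.Session[key]; }
                set { System.Web.HttpContext.Current.Session[key] = value; }
            }
        }

        // Test implementation: a plain dictionary, no web server needed.
        public class FakeSessionProvider : ISessionProvider
        {
            private readonly System.Collections.Generic.Dictionary<string, object>
                items = new System.Collections.Generic.Dictionary<string, object>();

            public object this[string key]
            {
                get { object v; items.TryGetValue(key, out v); return v; }
                set { items[key] = value; }
            }
        }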

  • Unable to step into interface implementation configured by unity application block

    - by Rahul
    I have configured a set of interface implementations with the EntLib Unity block. The constructors of the implementation classes run fine as soon as I start the application, which shows that the Unity resolution was successful. But when I try to call a method of such a class, the code just passes through without actually invoking the function of the implementing class. Edit (added on June 11, 2012): the following is the Unity configuration I have (this is all the Unity configuration I am doing): public class UnityControllerFactory : DefaultControllerFactory { private static readonly IUnityContainer container; private static UnityControllerFactory factory = null; static UnityControllerFactory() { container = new UnityContainer(); UnityConfigurationSection section = (UnityConfigurationSection)ConfigurationManager.GetSection("unity"); section.Configure(container); factory = new UnityControllerFactory(); } public static UnityControllerFactory GetControllerFactory() { return factory; } protected override IController GetControllerInstance(RequestContext requestContext, Type controllerType) { return container.Resolve(controllerType) as IController; } } I am unable to step into this code, and the implementation simply skips out without executing anything. What is wrong here?

  • Maven: properties not being substituted

    - by jobrahms
    I'm using a maven plugin for install4j in my project, located here. That plugin lets you pass variables to install4j using the <compilerVariables> section. Here's the relevant section of my pom: <plugin> <groupId>com.google.code.maven-install4j</groupId> <artifactId>maven-install4j-plugin</artifactId> <version>0.1.1</version> <configuration> <executable>${devenv.install4jc}</executable> <configFile>${basedir}/newinstaller/ehd.install4j</configFile> <releaseId>${project.version}</releaseId> <attach>false</attach> <skipOnMissingExecutable>false</skipOnMissingExecutable> <compilerVariables> <property> <name>m2_home</name> <value>${settings.localRepository}</value> </property> </compilerVariables> </configuration> </plugin> The problem is that ${settings.localRepository} is not being substituted with the actual directory when I run the plugin. Here's the command line script that install4j is generating: [INFO] Running the following command for install4j compile: /bin/sh -c /home/zach/install4j/bin/install4jc --release=9.1-SNAPSHOT --destination="/home/zach/projects/java/ehdtrunk/target/install4j" -D m2_home=${settings.localRepository} /home/zach/projects/java/ehdtrunk/newinstaller/ehd.install4j Is this the fault of the plugin? If so, what needs to change to allow the substitution to happen?

  • How to manage sessions in NHibernate unit tests?

    - by Ben
    I am a little unsure as to how to manage sessions within my nunit test fixtures. In the following test fixture, I am testing a repository. My repository constructor takes in an ISession (since I will be using session per request in my web application). In my test fixture setup I configure NHibernate and build the session factory. In my test setup I create a clean SQLite database for each test executed. [TestFixture] public class SimpleRepository_Fixture { private static ISessionFactory _sessionFactory; private static Configuration _configuration; [TestFixtureSetUp] // called before any tests in fixture are executed public void TestFixtureSetUp() { _configuration = new Configuration(); _configuration.Configure(); _configuration.AddAssembly(typeof(SimpleObject).Assembly); _sessionFactory = _configuration.BuildSessionFactory(); } [SetUp] // called before each test method is called public void SetupContext() { new SchemaExport(_configuration).Execute(true, true, false); } [Test] public void Can_add_new_simpleobject() { var simpleObject = new SimpleObject() { Name = "Object 1" }; using (var session = _sessionFactory.OpenSession()) { var repo = new SimpleObjectRepository(session); repo.Save(simpleObject); } using (var session =_sessionFactory.OpenSession()) { var repo = new SimpleObjectRepository(session); var fromDb = repo.GetById(simpleObject.Id); Assert.IsNotNull(fromDb); Assert.AreNotSame(simpleObject, fromDb); Assert.AreEqual(simpleObject.Name, fromDb.Name); } } } Is this a good approach or should I be handling the sessions differently? Thanks Ben
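
    One detail worth checking in this arrangement: with a SQLite :memory: connection string, each connection gets its own database, so the schema has to be exported on the very connection the test uses. A rough sketch dropping into the fixture above, assuming SchemaExport's overload that takes an open connection:

        // Sketch: session-per-test, with the schema exported into that
        // session's own connection (required for SQLite in-memory DBs).
        private ISession _session;

        [SetUp]
        public void SetupContext()
        {
            _session = _sessionFactory.OpenSession();
            // script: false, export: true, justDrop: false
            new SchemaExport(_configuration)
                .Execute(false, true, false, _session.Connection, null);
        }

        [TearDown]
        public void TearDownContext()
        {
            _session.Dispose();
        }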

  • Why does my App.Config codebase not help .NET locate my assembly?

    - by pkolodziej
    I have the following client application and its corresponding config file: namespace Chapter9 { class Program { static void Main(string[] args) { AppDomain.CurrentDomain.ExecuteAssembly("AssemblyPrivate.exe"); } } } <configuration> <runtime> <assemblyBinding xmlns="urn:schemas-microsoft-com:asm.v1"> <dependentAssembly> <codeBase href="file://C:\Users\djpiter\Documents\Visual Studio 2008\Projects\70536\AssemblyPrivate\bin\Debug\AssemblyPrivate.exe"/> </dependentAssembly> </assemblyBinding> </runtime> </configuration> AssemblyPrivate.exe does not have a public key, nor is it located in the GAC. As far as I know, the runtime should parse the app.config file before looking for an assembly in the client app directory. The unhandled exception (wrapped for readability) is: Unhandled Exception: System.IO.FileNotFoundException: Could not load file or assembly 'file:///C:\Users\djpiter\Documents\Visual Studio 2008\Projects\70536\Chapter9\bin\Debug\AssemblyPrivate.exe' or one of its dependencies. The system cannot find the file specified. Why is it not working? I need to use dynamic binding (not static). Kind regards, PK

  • Using Hibernate with MS ACCESS 2007 Database (Free JDBC Driver)

    - by Quentin T.
    1. I want to do reverse engineering with the Hibernate plugin for Eclipse on an MS Access 2007 database. I'm forced to use an existing MS Access 2007 db. An easy solution is to buy the HXTT driver, but I want to use a free driver to do my work. So I tried to apply this post: http://www.programmingforfuture.com/2011/06/how-to-use-ms-access-with-hibernate.html (which uses the SQL Server dialect and the driver sun.jdbc.odbc.JdbcOdbcDriver). Unfortunately, I get an error that nobody on the internet seems to have hit: Exception while generating code Reason : org.hibernate.exception.GenericJDBCException: Error while reading primary key meta data for `c:/myaccessdb.mdb`.TableTest1 I have tried changing the primary keys in my MS Access DB (deleting all primary keys), and tried the reverse engineering on an MS Access DB with only one table and no primary key, but I get the problem every time. 2. The purpose of my job is to transfer data daily (or weekly) from an existing MS Access 2007 database into an Oracle 11g database. I thought of using a Java procedure (Hibernate/EJB) launched automatically every week to do the data transfer. Is this the best solution? Configuration: sun.jdbc.odbc.JdbcOdbcDriver v??? Hibernate v3.4 Eclipse PS: if you are an HXTT developer or seller, please be indulgent with my post ;). Making money by making people believe that you help is bad! A solution is to use the Derby Client driver, as in the post: Does anyone know if Hibernate and java will work effectively with Access? But a clarification of Rich Seller's answer is required. Could you explain your answer and your configuration (hibernate.cfg.xml, persistence.xml and what URL you use in the property name="hibernate.connection.url"), without using the paid HXTT driver but with the free Derby driver?

  • New projects not built when target platform is set explicitly

    - by stiank81
    I create a new solution with one project, and then change the target platform from "Any CPU" to "x86". After this, newly added projects don't get built by default, and their target platform doesn't follow the global setting. Why? Looking at the Configuration Manager, newly added projects are not checked to build, and they get the target platform "Any CPU" instead of the globally set x86. Why is this happening? I expect new projects to also get the globally defined x86 target platform. Some things I've tried: Toggling the global platform back to Any CPU, and then to x86 again. No change. Choosing the platform explicitly for the new project. x86 is not available in the list, and when I select <New..> and try adding it, I'm not allowed to, as ".. a solution platform with the same name already exists.". In the build properties for the new project I can't change the platform in the Configuration section, but I can set "Platform target" to x86 in the General section. It is, however, not clear whether this actually makes a difference, and it doesn't respond when I change the target platform globally later. Initially I thought this was a problem from converting my solution from VS2008 to VS2010, but the problem appears in both places; i.e. when I create a solution in VS2008 and just stay in VS2008, I still get the problem.

  • Static Property losing its value intermittently ?

    - by joedotnot
    Is there something fundamentally wrong with the following design, or can anyone see why the static properties would sometimes lose their values? I have a class library project containing a class AppConfig; this class is consumed by a WebForms project. The skeleton of the AppConfig class is as follows: Public Class AppConfig Implements IConfigurationSectionHandler Private Const C_KEY1 As String = "WebConfig.Key.1" Private Const C_KEY2 As String = "WebConfig.Key.2" Private Const C_KEY1_DEFAULT_VALUE as string = "Key1defaultVal" Private Const C_KEY2_DEFAULT_VALUE as string = "Key2defaultVal" Private Shared m_field1 As String Private Shared m_field2 As String Public Shared ReadOnly Property ConfigValue1() As String Get ConfigValue1= m_field1 End Get End Property Public Shared ReadOnly Property ConfigValue2() As String Get ConfigValue2 = m_field2 End Get End Property Public Shared Sub OnApplicationStart() m_field1 = ReadSetting(C_KEY1, C_KEY1_DEFAULT_VALUE) m_field2 = ReadSetting(C_KEY2, C_KEY1_DEFAULT_VALUE) End Sub Public Overloads Shared Function ReadSetting(ByVal key As String, ByVal defaultValue As String) As String Try Dim setting As String = System.Configuration.ConfigurationManager.AppSettings(key) If setting Is Nothing Then ReadSetting = defaultValue Else ReadSetting = setting End If Catch ReadSetting = defaultValue End Try End Function Public Function Create(ByVal parent As Object, ByVal configContext As Object, ByVal section As System.Xml.XmlNode) As Object Implements System.Configuration.IConfigurationSectionHandler.Create Dim objSettings As NameValueCollection Dim objHandler As NameValueSectionHandler objHandler = New NameValueSectionHandler objSettings = CType(objHandler.Create(parent, configContext, section), NameValueCollection) Return 1 End Function End Class The static properties get set once at application start, from the Application_Start event in Global.asax: Sub Application_Start(ByVal sender As Object, ByVal e As EventArgs) 'Fires when the application is started AppConfig.OnApplicationStart() End Sub Thereafter, whenever we want to access a value in the Web.config from anywhere, e.g. aspx page code-behind or another class or referenced class, we simply call the static property. For example, AppConfig.ConfigValue1() AppConfig.ConfigValue2() This in turn returns the value stored in the static backing fields m_field1, m_field2. The problem is that sometimes these values are empty strings, when the Web.config entries clearly have values. Is there something fundamentally wrong with the above design, or is it reasonable to expect the static properties to keep their values for the life of the application?

  • Maven: trying to get my submodules' poms to NOT inherit a plugin in the parent

    - by jobrahms
    My project has a parent pom and several submodule poms. I've put a plugin in the parent that is responsible for building our installer distributables (using install4j). It doesn't make sense to have this plugin run on the submodules, so I've set <inherited>false</inherited> in the plugin's config, as seen below. The problem is, when I run mvn clean install install4j:compile it cleans, compiles, and runs the install4j plugin on the parent, but then it tries to run it on the child modules and crashes. Here's the plugin config: <plugin> <groupId>com.google.code.maven-install4j</groupId> <artifactId>maven-install4j-plugin</artifactId> <version>0.1.1</version> <inherited>false</inherited> <configuration> <executable>${devenv.install4jc}</executable> <configFile>${basedir}/newinstaller/ehd-demo.install4j</configFile> <releaseId>${project.version}</releaseId> <attach>false</attach> <skipOnMissingExecutable>true</skipOnMissingExecutable> </configuration> </plugin> Am I misunderstanding the purpose of inherited=false? What is the correct way to get this to work? I'm using Maven 2.2.0.

  • How to create TestContext for Spring Test?

    - by HDave
    Newcomer to Spring here, so pardon me if this is a stupid question. I have a relatively small Java library that implements a few dozen beans (no database or GUI). I have created a Spring bean configuration file that other Java projects use to inject my beans into their stuff. I am now for the first time trying to use Spring Test to inject some of these beans into my JUnit test classes (rather than simply instantiating them). I am doing this partly to learn Spring Test and partly to force the tests to use the same bean configuration file I provide for others. In the Spring documentation it says I need to create an application context using the "TestContext" class that comes with Spring. I believe this should be done in a Spring XML file that I reference via the @ContextConfiguration annotation on my test class: @ContextConfiguration({"/test-applicationContext.xml"}) However, there is no hint as to what to put in the file! When I go to run my tests from within Eclipse, it errors out saying "failed to load Application Context".

  • Relative URL in WCF service binding

    - by Jeremy
    I have a silverlight control which has a reference to a silverlight enabled wcf service. When I add a reference to the service in my silverlight control, it adds the following to my clientconfig file: <configuration> <system.serviceModel> <bindings> <basicHttpBinding> <binding name="BasicHttpBinding_DataAccess" maxBufferSize="2147483647" maxReceivedMessageSize="2147483647"> <security mode="None" /> </binding> </basicHttpBinding> </bindings> <client> <endpoint address="http://localhost:3097/MyApp/DataAccess.svc" binding="basicHttpBinding" bindingConfiguration="BasicHttpBinding_DataAccess" contract="svcMyService.DataAccess" name="BasicHttpBinding_DataAccess" /> </client> </system.serviceModel> </configuration> How do I specify a relative url in the endpoint address instead of the absolute url? I want it to work no matter where I deploy the web app to without having to edit the clientconfig file, because the silverlight component and the web app will always be deployed together. I thought I'd be able to specify just "DataAccess.svc" but it doesn't seem to like that.
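
    A common workaround, sketched here on the assumption that the .xap is always served from the same web application as the service: build the address from the host URI at runtime and hand it to the generated client, so no absolute URL has to live in the clientconfig. The relative path below is an assumption about the layout:

        // Sketch: derive the service address from wherever the XAP came from.
        using System;
        using System.ServiceModel;

        public static class DataAccessClientFactory
        {
            public static svcMyService.DataAccessClient Create()
            {
                // Host.Source is the URI of the .xap (usually under /ClientBin);
                // "../DataAccess.svc" is an assumed relative location.
                Uri xapUri = System.Windows.Application.Current.Host.Source;
                Uri serviceUri = new Uri(xapUri, "../DataAccess.svc");

                var binding = new BasicHttpBinding(BasicHttpSecurityMode.None)
                {
                    MaxReceivedMessageSize = 2147483647,
                    MaxBufferSize = 2147483647
                };
                return new svcMyService.DataAccessClient(
                    binding, new EndpointAddress(serviceUri));
            }
        }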

  • Is there a way to programmatically check dependencies of an EXE?

    - by Mason Wheeler
    I've got a certain project that I build and distribute to users. I have two build configurations, Debug and Release. Debug, obviously, is for my use in debugging, but there's an additional wrinkle: the Debug configuration uses a special debugging memory manager, with a dependency on an external DLL. There have been a few times when I've accidentally built and distributed an installer package with the Debug configuration, and it has then failed to run once installed because the users don't have the special DLL. I'd like to be able to keep that from happening in the future. I know I can get the dependencies of a program by running Dependency Walker, but I'm looking for a way to do it programmatically. Specifically, I have a way to run scripts while creating the installer, and I want something I can put in the installer script to check the program and see if it has a dependency on this DLL, and if so, cause the installer-creation process to fail with an error. I know how to create a simple CLI program that would take two filenames as parameters, run a DependsOn function, and create output based on the result, but I don't know what to put in the DependsOn function. Does anyone know how I'd go about writing it?
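
    One low-tech sketch of a DependsOn implementation, assuming the Visual Studio tools are available on the build machine: shell out to dumpbin /DEPENDENTS and scan its output for the DLL name. The file names below are illustrative:

        // Sketch: ask dumpbin for the import table and search for the DLL.
        using System;
        using System.Diagnostics;

        static class DependencyCheck
        {
            // True if exePath imports dllName; "DebugMM.dll" would be a
            // made-up stand-in for the debug memory manager's DLL.
            public static bool DependsOn(string exePath, string dllName)
            {
                var psi = new ProcessStartInfo("dumpbin.exe",
                    "/DEPENDENTS \"" + exePath + "\"")
                {
                    RedirectStandardOutput = true,
                    UseShellExecute = false
                };
                using (var p = Process.Start(psi))
                {
                    string output = p.StandardOutput.ReadToEnd();
                    p.WaitForExit();
                    return output.IndexOf(dllName,
                        StringComparison.OrdinalIgnoreCase) >= 0;
                }
            }
        }

    The same check can be done without external tools by reading the PE import table directly, but the dumpbin route keeps the sketch short.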
