ADC value in pic 16f1827

533 views Asked by At

I want to check the result of adc and when this condition is met i want my microcontroller to output a digital signal at RB5 pin. Please check the code.is it correct.? Becoz when i run the simulation its not working properly in proteus. Here's the code.

#define USE_OR_MASKS
#include <xc.h>
#include <stdlib.h>



// CONFIG1
#pragma config FOSC = INTOSC    
// Oscillator Selection (INTOSC oscillator:        I/O function on CLKIN pin)
#pragma config WDTE = OFF       // Watchdog Timer Enable (WDT disabled)
#pragma config PWRTE = OFF      // Power-up Timer Enable (PWRT disabled)
#pragma config MCLRE = OFF      // MCLR Pin Function Select (MCLR/VPP pin function is digital input)
#pragma config CP = OFF         // Flash Program Memory Code Protection    (Program memory code protection is disabled)
#pragma config BOREN = OFF      // Brown-out Reset Enable (Brown-out Reset disabled)
#pragma config CLKOUTEN = OFF   // Clock Out Enable (CLKOUT function is disabled. I/O or oscillator function on the CLKOUT pin)
#pragma config IESO = OFF       // Internal/External Switchover (Internal/External Switchover mode is disabled)
#pragma config FCMEN = OFF      // Fail-Safe Clock Monitor Enable (Fail-Safe Clock Monitor is disabled)

// CONFIG2
#pragma config WRT = OFF        // Flash Memory Self-Write Protection (Write protection off)
#pragma config STVREN = ON      // Stack Overflow/Underflow Reset Enable (Stack Overflow or Underflow will cause a Reset)
#pragma config BORV = LO        // Brown-out Reset Voltage Selection (Brown-out Reset Voltage (Vbor), low trip point selected.)
#pragma config LVP = ON         // Low-Voltage Programming Enable (Low-voltage programming enabled)

void InitSfr(void);
int ReadADC(unsigned char ch);
void InitADC(void);
void Delay_ms(unsigned long time);




 //************ Global Variables ****************************
 int value;
 unsigned int result;

//************ I2C MASTER ****************************
void main(void)
{

InitSfr();
InitADC();

LATBbits.LATB3=0;
LATBbits.LATB5=0;
//checking for the ADC result
do
{
//value=ReadADC(0);
//LATB=value;
//unsigned long int max = (value/1023)*5;
  ADCON0bits.CHS0 = 0;
  ADCON0bits.CHS1 = 0;
  ADCON0bits.CHS2 = 0;
  ADCON0bits.CHS3 = 0;
  ADCON0bits.CHS4 = 0;
  ADCON0bits.ADON = 1;  // enable A/D converter
//  ADCON0 = 0b00000001 + (ch<<2);  // set channel to read
    Delay_ms(5);
    GO_nDONE =1;        // 16F1827 start conversion
    while(GO_nDONE){};                  // wait for conversion

    result = (ADRESH << 8) + ADRESL;

    if (result>512)
    {
    LATBbits.LATB5=1;
    Delay_ms(10);
    LATBbits.LATB5=!LATBbits.LATB5;
    Delay_ms(10);
    }
   else
   {
    LATBbits.LATB3=1;
    Delay_ms(10);
    LATBbits.LATB3=!LATBbits.LATB3;
    Delay_ms(10);
   }
   }  while(1);

                                           //PIC16 Register Initialization
   }//end Main

   void InitSfr(void)
    {
    ANSELA = 0b00000001;  // RA0 analog input
    TRISA  = 0b00100001;  // RA0, RA5 inputs
    ANSELB = 0b00000000;  // PORTB all digital outputs
    TRISB  = 0b11100111;  // PORTB all outputs


    // Configure FVR to 4.096 V
    FVRCON = 0b11000011 ;

    }
   //initialise the ADC
   void InitADC()
   {

    // Configure ADCON1=0b1000011
    ADCON1bits.ADNREF  = 0; // Vref- is connected to ground
    ADCON1bits.ADPREF0 = 1; // Vref+ is 4.096 V
    ADCON1bits.ADPREF1 = 1;
    ADCON1bits.ADCS0   = 0; // Use conversion clock, Fosc/2
    ADCON1bits.ADCS1   = 0; // Fosc = 500 KHz
    ADCON1bits.ADCS2   = 0;
    ADCON1bits.ADFM    = 1;  // result is right Justified

    }
 void Delay_ms(unsigned long time)
  {
    unsigned int  j;
    while(time--)
   {
    for(j = 0; j<260; j++)
    {
     int x=0;
    }
   }
 }
1

There are 1 answers

1
Amol Saindane On

Hi there is error in void InitSfr(void).

Error is You made TRISB Bit 5 as input by making that 1

As you want your PORTB as output then you need to set TRISB properly

TRISB  = 0b00000000;  // PORTB all outputs
PORTB  = 0b00000000;  // It is good to make all low by default